Usage examples for the org.antlr.v4.runtime Token.EOF field
Declaration: int EOF
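Token.EOF is the token type (value -1) that an ANTLR lexer reports when its input is exhausted; token streams, parsers, and error strategies use it as the end-of-input sentinel. Before the project-specific examples below, here is a minimal sketch of the most common pattern: a hypothetical helper (not part of the ANTLR runtime) that pulls tokens from any TokenSource until the EOF token appears.

import java.util.ArrayList;
import java.util.List;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;

// Drains a token source until the EOF token is returned.
// Token.EOF is the sentinel type ANTLR lexers emit at end of input.
public final class EofDrain {
    private EofDrain() {
    }

    public static List<Token> drain(TokenSource source) {
        List<Token> result = new ArrayList<>();
        Token t = source.nextToken();
        while (t.getType() != Token.EOF) {   // stop at the EOF sentinel
            result.add(t);
            t = source.nextToken();
        }
        return result;
    }
}

Any generated lexer can be passed in (for example a hypothetical MyLexer built over CharStreams.fromString(...)), since every generated lexer implements TokenSource.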
From source file:org.eclipse.titan.designer.AST.ASN1.Parameterised_Reference.java
License:Open Source License
/**
 * Fill the assignments according to the formal parameters
 *
 * @param aAssPard (in) formal parameters for the conversion
 * @param aCompilationTimeStamp compilation timestamp
 */
private void addAssignments(final Ass_pard aAssPard, final CompilationTimeStamp aCompilationTimeStamp) {
    final List<FormalParameter_Helper> formalParameters = ((Ass_pard) aAssPard)
            .getFormalParameters(aCompilationTimeStamp);
    final int nofFormalParameters = formalParameters.size();
    if (null != mBlock) {
        final List<List<Token>> actualParameters = new ArrayList<List<Token>>();
        List<Token> temporalBuffer = new ArrayList<Token>();
        /* splitting the list of actual parameters */
        final List<Token> unprocessParameters = mBlock.getTokenList();
        for (int i = 0; i < unprocessParameters.size(); i++) {
            Token tempToken = unprocessParameters.get(i);
            if (tempToken.getType() == Asn1Lexer.COMMA) {
                temporalBuffer.add(new TokenWithIndexAndSubTokens(Token.EOF));
                actualParameters.add(temporalBuffer);
                temporalBuffer = new ArrayList<Token>();
            } else {
                temporalBuffer.add(tempToken);
            }
        }
        if (!temporalBuffer.isEmpty()) {
            temporalBuffer.add(new TokenWithIndexAndSubTokens(Token.EOF));
            actualParameters.add(temporalBuffer);
        }
        /* checking the number of parameters */
        final int nofActualParameters = actualParameters.size();
        if (nofActualParameters != nofFormalParameters) {
            location.reportSemanticError(MessageFormat.format(DIFFERENTPARAMETERNUMBERS,
                    (nofActualParameters < nofFormalParameters) ? "few" : "many",
                    nofFormalParameters, nofActualParameters));
        }
        assignments = new ASN1Assignments();
        for (int i = 0; i < nofFormalParameters; i++) {
            final Identifier tempIdentifier = formalParameters.get(i).identifier;
            ASN1Assignment temporalAssignment = null;
            if (i < nofActualParameters) {
                List<Token> temporalTokenBuffer = new ArrayList<Token>();
                temporalTokenBuffer.add(formalParameters.get(i).formalParameterToken);
                Token temporalToken = formalParameters.get(i).governorToken;
                if (null != temporalToken) {
                    temporalTokenBuffer.add(temporalToken);
                }
                temporalTokenBuffer.add(new TokenWithIndexAndSubTokens(Asn1Lexer.ASSIGNMENT));
                temporalTokenBuffer.addAll(actualParameters.get(i));
                // parse temporalTokenBuffer as an assignment
                //List<ANTLRException> exceptions = null;
                final Asn1Parser parser = BlockLevelTokenStreamTracker
                        .getASN1ParserForBlock(new Block(temporalTokenBuffer, location));
                if (null != parser) {
                    temporalAssignment = parser.pr_special_Assignment().assignment;
                    List<SyntacticErrorStorage> errors = parser.getErrorStorage();
                    if (null != errors && !errors.isEmpty()) {
                        isErroneous = true;
                        temporalAssignment = null;
                        for (int j = 0; j < errors.size(); j++) {
                            ParserMarkerSupport.createOnTheFlyMixedMarker(
                                    (IFile) mBlock.getLocation().getFile(), errors.get(j),
                                    IMarker.SEVERITY_ERROR);
                        }
                    }
                }
            }
            if (null == temporalAssignment) {
                temporalAssignment = new Type_Assignment(tempIdentifier, null, null);
            }
            temporalAssignment.setLocation(location);
            assignments.addAssignment(temporalAssignment);
        }
        for (List<Token> temporalActualParamater : actualParameters) {
            temporalActualParamater.clear();
        }
        actualParameters.clear();
    }
}
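The key Token.EOF idiom above is the splitting step: each comma-separated slice of the block's token list is closed with a synthetic EOF token so that the sub-parser built for that slice sees a properly terminated stream. A minimal, self-contained sketch of that step, using the runtime's CommonToken in place of Titan's TokenWithIndexAndSubTokens and a caller-supplied comma token type (both are assumptions for illustration):

import java.util.ArrayList;
import java.util.List;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Token;

// Splits a flat token list into comma-separated slices and terminates each slice
// with a synthetic EOF token, so each slice can be parsed independently.
public final class ParameterSplitter {
    public static List<List<Token>> split(List<Token> input, int commaType) {
        List<List<Token>> slices = new ArrayList<>();
        List<Token> current = new ArrayList<>();
        for (Token t : input) {
            if (t.getType() == commaType) {
                current.add(new CommonToken(Token.EOF));  // terminate the finished slice
                slices.add(current);
                current = new ArrayList<>();
            } else {
                current.add(t);
            }
        }
        if (!current.isEmpty()) {
            current.add(new CommonToken(Token.EOF));      // terminate the trailing slice
            slices.add(current);
        }
        return slices;
    }
}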
From source file:org.eclipse.titan.designer.parsers.asn1parser.BlockLevelTokenStreamTracker.java
License:Open Source License
@Override
public int fetch(int n) {
    if (fetchedEOF) {
        return 0;
    }
    Token t;
    Token first;
    int i = 0;
    if (oldList == null || index >= oldList.size()) {
        tokens.add(new TokenWithIndexAndSubTokens(Token.EOF));
        return ++i;
    }
    do {
        t = oldList.get(index++);
        first = t;
        if (t == null) {
            return 0;
        } else if (discardMask.contains(Integer.valueOf(t.getType()))) {
            // discard this Token
            assert (true); // TODO: remove it if it proves OK (in Lexer done it)
        } else if (t.getType() == Asn1Lexer.BEGINCHAR) {
            boolean exit = getBlock(first);
            if (exit) {
                return ++i;
            }
            ++i;
            --n;
        } else {
            tokens.add(t);
            ++i;
            --n;
        }
    } while (0 < n);
    return i;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.BlockLevelTokenStreamTracker.java
License:Open Source License
private boolean getBlock(Token first) {
    // return true if it ran out of bounds
    if (index >= oldList.size()) {
        tokens.add(first);
        return true;
    }
    TokenWithIndexAndSubTokens result;
    Token t = oldList.get(index++);
    List<Token> tokenList = new ArrayList<Token>();
    int nofUnclosedParanthesis = 1;
    while (t != null && t.getType() != Token.EOF && index < oldList.size()) {
        if (t.getType() == Asn1Lexer.BEGINCHAR) {
            nofUnclosedParanthesis++;
        } else if (t.getType() == Asn1Lexer.ENDCHAR) {
            nofUnclosedParanthesis--;
            if (nofUnclosedParanthesis == 0) {
                result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
                result.setCharPositionInLine(first.getCharPositionInLine());
                result.setLine(first.getLine());
                result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
                result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
                result.setText(makeString(tokenList));
                tokens.add(result);
                return false;
            }
        }
        if (!discardMask.contains(Integer.valueOf(t.getType()))) {
            tokenList.add(t);
        }
        t = oldList.get(index++);
    }
    result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
    result.setCharPositionInLine(first.getCharPositionInLine());
    result.setLine(first.getLine());
    result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
    if (t != null) {
        result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
    }
    tokens.add(result);
    return true;
}
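This method, and its module- and syntax-level counterparts further below, share one idea: collect the tokens of a nested block while tracking bracket depth, and treat Token.EOF (or running out of tokens) as an "unterminated block" signal. A simplified, hypothetical sketch of that core loop, with openType and closeType standing in for Asn1Lexer.BEGINCHAR/ENDCHAR:

import java.util.ArrayList;
import java.util.List;
import org.antlr.v4.runtime.Token;

// Collects the body of a bracketed block from a token list, stopping early on EOF.
public final class BlockCollector {
    // Returns the tokens between the already-consumed opener and its matching closer,
    // or null when the block is unterminated (Token.EOF or end of list reached first).
    public static List<Token> collect(List<Token> input, int start, int openType, int closeType) {
        List<Token> body = new ArrayList<>();
        int depth = 1;
        for (int i = start; i < input.size(); i++) {
            Token t = input.get(i);
            if (t.getType() == Token.EOF) {
                return null;                 // unterminated block
            }
            if (t.getType() == openType) {
                depth++;
            } else if (t.getType() == closeType) {
                depth--;
                if (depth == 0) {
                    return body;             // matching closer found
                }
            }
            body.add(t);
        }
        return null;                         // ran out of tokens without a closer
    }
}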
From source file:org.eclipse.titan.designer.parsers.asn1parser.ModuleLevelTokenStreamTracker.java
License:Open Source License
@Override
public int fetch(int n) {
    if (fetchedEOF) {
        return 0;
    }
    Token t;
    Token first;
    int i = 0;
    do {
        t = getTokenSource().nextToken();
        if (t instanceof WritableToken) {
            ((WritableToken) t).setTokenIndex(tokens.size());
        }
        first = t;
        if (t.getType() == Token.EOF) {
            fetchedEOF = true;
            tokens.add(new TokenWithIndexAndSubTokens(t));
            return ++i;
        } else if (discardMask.contains(Integer.valueOf(t.getType()))) {
            // discard this Token
            assert (true);
        } else if (t.getType() == Asn1Lexer.BEGINCHAR) {
            fetchedEOF = getBlock(first);
            if (fetchedEOF) {
                return ++i;
            }
            ++i;
            --n;
        } else {
            tokens.add(t);
            ++i;
            --n;
        }
    } while (0 < n);
    return i;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.ModuleLevelTokenStreamTracker.java
License:Open Source License
private boolean getBlock(Token first) {
    // return true if EOF was hit
    Token t;
    TokenWithIndexAndSubTokens result;
    t = getTokenSource().nextToken();
    if (t instanceof WritableToken) {
        ((WritableToken) t).setTokenIndex(tokens.size());
    }
    List<Token> tokenList = new ArrayList<Token>();
    int nofUnclosedParanthesis = 1;
    while (t != null && t.getType() != Token.EOF) {
        if (t.getType() == Asn1Lexer.BEGINCHAR) {
            nofUnclosedParanthesis++;
        } else if (t.getType() == Asn1Lexer.ENDCHAR) {
            nofUnclosedParanthesis--;
            if (nofUnclosedParanthesis == 0) {
                result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
                result.setCharPositionInLine(first.getCharPositionInLine());
                result.setLine(first.getLine());
                result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
                result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
                result.setText(makeString(tokenList));
                tokens.add(result);
                return false;
            }
        }
        if (!discardMask.contains(Integer.valueOf(t.getType()))) {
            tokenList.add(new TokenWithIndexAndSubTokens(t));
        }
        t = getTokenSource().nextToken();
    }
    result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
    result.setCharPositionInLine(first.getCharPositionInLine());
    result.setLine(first.getLine());
    result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
    if (t != null) {
        result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
    }
    tokens.add(result);
    return true;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.SyntaxLevelTokenStreamTracker.java
License:Open Source License
@Override
public int fetch(int n) {
    if (fetchedEOF) {
        return 0;
    }
    Token first;
    int i = 0;
    if (oldList == null || index >= oldList.size()) {
        tokens.add(new TokenWithIndexAndSubTokens(Token.EOF));
        return ++i;
    }
    do {
        Token t = oldList.get(index++);
        //t = tokens.get(index++);
        first = t;
        if (t == null) {
            return 0;
        } else if (discardMask.contains(Integer.valueOf(t.getType()))) {
            // discard this Token
        } else if (t.getType() == Asn1Lexer.SQUAREOPEN) {
            boolean exit = getBlock(first);
            if (exit) {
                return ++i;
            }
            ++i;
            --n;
        } else {
            tokens.add(t);
            ++i;
            --n;
        }
    } while (0 < n);
    return i;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.SyntaxLevelTokenStreamTracker.java
License:Open Source License
private boolean getBlock(Token first) {
    if (index >= oldList.size()) {
        tokens.add(first);
        return true;
    }
    TokenWithIndexAndSubTokens result;
    Token t = oldList.get(index++);
    List<Token> tokenList = new ArrayList<Token>();
    int nofUnclosedParanthesis = 1;
    while (t != null && t.getType() != Token.EOF && index < oldList.size()) {
        if (t.getType() == Asn1Lexer.SQUAREOPEN) {
            nofUnclosedParanthesis++;
        } else if (t.getType() == Asn1Lexer.SQUARECLOSE) {
            nofUnclosedParanthesis--;
            if (nofUnclosedParanthesis == 0) {
                result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
                result.setCharPositionInLine(first.getCharPositionInLine());
                result.setLine(first.getLine());
                result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
                result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
                result.setText(makeString(tokenList));
                tokens.add(result);
                return false;
            }
        }
        if (!discardMask.contains(Integer.valueOf(t.getType()))) {
            tokenList.add(t);
        }
        t = oldList.get(index++);
    }
    result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
    result.setCharPositionInLine(first.getCharPositionInLine());
    result.setLine(first.getLine());
    result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
    if (t != null) {
        result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
    }
    tokens.add(result);
    return true;
}
From source file:org.eclipse.titan.designer.parsers.ttcn3parser.PPDirectiveTokenFactory.java
License:Open Source License
public PPDirectiveTokenFactory() {
    this.copyText = false;
    this.token = new CommonToken(Token.EOF);
}
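The constructor above initializes the factory with an EOF-typed CommonToken so that a valid token object exists before any real input has been lexed. A small hypothetical sketch of the same defaulting idea, using only the runtime API:

import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Token;

// Provides an EOF-typed CommonToken as a non-null default value.
public final class EofTokens {
    private EofTokens() {
    }

    // CommonToken(int) creates a synthetic token with the given type and no text
    // or position information attached; Token.EOF marks it as an end-of-input token.
    public static Token placeholder() {
        return new CommonToken(Token.EOF);
    }
}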
From source file:org.eclipse.titan.designer.parsers.ttcn3parser.ConditionalTransition.java
License:Open Source License
@Override
public int fetch(int n) {
    if (fetchedEOF) {
        return 0;
    }
    int i = 0;
    do {
        Token t;
        if (tokenStreamStack.isEmpty()) {
            t = getTokenSource().nextToken();
        } else {
            t = tokenStreamStack.peek().getTokenSource().nextToken();
        }
        if (t == null) {
            return 0;
        }
        int tokenType = t.getType();
        if (tokenType == Ttcn3Lexer.PREPROCESSOR_DIRECTIVE) {
            lastPPDirectiveLocation = new Location(actualFile, t.getLine(), t.getStartIndex(), t.getStopIndex() + 1);
            // 1. the first # shall be discarded
            // 2. "\\\n" strings are removed, so multiline tokens, which are split by backslash, are extracted to one line
            final String text = t.getText().substring(1).replace("\\\n", "");
            Reader reader = new StringReader(text);
            CharStream charStream = new UnbufferedCharStream(reader);
            PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream);
            lexer.setTokenFactory(new PPDirectiveTokenFactory(true, t));
            lexerListener = new PPListener();
            lexer.removeErrorListeners();
            lexer.addErrorListener(lexerListener);
            lexer.setLine(t.getLine());
            lexer.setCharPositionInLine(t.getCharPositionInLine());
            // 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream,
            //    because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
            //    Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU
            //    pr_PatternChunk[StringBuilder builder, boolean[] uni]:
            //    $builder.append($v.text); <-- exception is thrown here:
            //    java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341
            // 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with
            //    "-> channel(HIDDEN)" are not filtered out in lexer.
            final CommonTokenStream tokenStream = new CommonTokenStream(lexer);
            PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser(tokenStream);
            localParser.setBuildParseTree(false);
            parserListener = new PPListener(localParser);
            localParser.removeErrorListeners();
            localParser.addErrorListener(parserListener);
            localParser.setIsActiveCode(condStateStack.isPassing());
            localParser.setMacros(macros);
            localParser.setLine(t.getLine());
            PreprocessorDirective ppDirective = null;
            ppDirective = localParser.pr_Directive().ppDirective;
            errorsStored.addAll(localParser.getErrorStorage());
            warnings.addAll(localParser.getWarnings());
            unsupportedConstructs.addAll(localParser.getUnsupportedConstructs());
            if (ppDirective != null) {
                ppDirective.line = t.getLine();
                if (ppDirective.isConditional()) {
                    boolean preIsPassing = condStateStack.isPassing();
                    condStateStack.processDirective(ppDirective);
                    boolean postIsPassing = condStateStack.isPassing();
                    if (preIsPassing != postIsPassing && tokenStreamStack.isEmpty()
                            && getTokenSource() instanceof Ttcn3Lexer) {
                        // included files are ignored because of ambiguity
                        Location ppLocation = lastPPDirectiveLocation;
                        if (ppLocation != null) {
                            if (preIsPassing) {
                                // switched to inactive: begin a new inactive location
                                Location loc = new Location(actualFile, ppLocation.getLine(),
                                        ppLocation.getEndOffset(), ppLocation.getEndOffset());
                                inactiveCodeLocations.add(loc);
                            } else {
                                // switched to active: end the current inactive location
                                int iclSize = inactiveCodeLocations.size();
                                if (iclSize > 0) {
                                    Location lastLocation = inactiveCodeLocations.get(iclSize - 1);
                                    lastLocation.setEndOffset(ppLocation.getOffset());
                                }
                            }
                        }
                    }
                } else {
                    // other directive types
                    if (condStateStack.isPassing()) {
                        // do something with the directive
                        switch (ppDirective.type) {
                        case INCLUDE: {
                            if (tokenStreamStack.size() > RECURSION_LIMIT) {
                                // dumb but safe defense against infinite recursion, default value from gcc
                                TITANMarker marker = new TITANMarker("Maximum #include recursion depth reached",
                                        ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
                                unsupportedConstructs.add(marker);
                            } else {
                                //TODO: Makes the Eclipse slow down
                                processIncludeDirective(ppDirective);
                            }
                        }
                            break;
                        case ERROR: {
                            String errorMessage = ppDirective.str == null ? "" : ppDirective.str;
                            TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1,
                                    IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
                            unsupportedConstructs.add(marker);
                        }
                            break;
                        case WARNING: {
                            String warningMessage = ppDirective.str == null ? "" : ppDirective.str;
                            TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1,
                                    IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL);
                            warnings.add(marker);
                        }
                            break;
                        case LINECONTROL:
                        case LINEMARKER:
                        case PRAGMA:
                        case NULL: {
                            String reportPreference = Platform.getPreferencesService().getString(
                                    ProductConstants.PRODUCT_ID_DESIGNER,
                                    PreferenceConstants.REPORT_IGNORED_PREPROCESSOR_DIRECTIVES,
                                    GeneralConstants.WARNING, null);
                            if (!GeneralConstants.IGNORE.equals(reportPreference)) {
                                boolean isError = GeneralConstants.ERROR.equals(reportPreference);
                                TITANMarker marker = new TITANMarker(
                                        MessageFormat.format("Preprocessor directive {0} is ignored",
                                                ppDirective.type.getName()),
                                        ppDirective.line, -1, -1,
                                        isError ? IMarker.SEVERITY_ERROR : IMarker.SEVERITY_WARNING,
                                        IMarker.PRIORITY_NORMAL);
                                if (isError) {
                                    unsupportedConstructs.add(marker);
                                } else {
                                    warnings.add(marker);
                                }
                            }
                        }
                            break;
                        default:
                            // ignore
                        }
                    }
                }
            }
        } else if (tokenType == Token.EOF) {
            if (!tokenStreamStack.isEmpty()) {
                // the included file ended, drop lexer from the stack and ignore EOF token
                TokenStreamData tsd = tokenStreamStack.pop();
                if (parser != null) {
                    if (tokenStreamStack.isEmpty()) {
                        parser.setActualFile(actualFile);
                        parser.setLexer(actualLexer);
                    } else {
                        parser.setActualFile(tokenStreamStack.peek().file);
                        parser.setLexer(tokenStreamStack.peek().lexer);
                    }
                }
                if (tsd.reader != null) {
                    try {
                        tsd.reader.close();
                    } catch (IOException e) {
                    }
                }
            } else {
                fetchedEOF = true;
                condStateStack.eofCheck();
                tokens.add(t);
                ((CommonToken) t).setTokenIndex(tokens.size() - 1);
                --n;
                ++i;
                if (n == 0) {
                    return i;
                }
            }
        } else {
            if (condStateStack.isPassing()) {
                tokens.add(t);
                ((CommonToken) t).setTokenIndex(tokens.size() - 1);
                --n;
                ++i;
                if (n == 0) {
                    return i;
                }
            }
        }
    } while (true);
}
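Stripped of the preprocessor handling, the Token.EOF branch above implements a common include-stack pattern: an EOF coming from an included stream only pops that stream, while an EOF from the outermost stream is the real end of input. A greatly simplified, hypothetical sketch of that pattern (not the Titan implementation):

import java.util.ArrayDeque;
import java.util.Deque;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;

// Keeps nested include streams on a stack; only the root stream's EOF is propagated.
public final class IncludeStack {
    private final TokenSource root;
    private final Deque<TokenSource> includes = new ArrayDeque<>();

    public IncludeStack(TokenSource root) {
        this.root = root;
    }

    public void pushInclude(TokenSource included) {
        includes.push(included);
    }

    public Token nextToken() {
        while (true) {
            TokenSource current = includes.isEmpty() ? root : includes.peek();
            Token t = current.nextToken();
            if (t.getType() != Token.EOF || includes.isEmpty()) {
                return t;        // ordinary token, or the real EOF of the root source
            }
            includes.pop();      // included source ended: drop it and keep reading
        }
    }
}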
From source file:org.harctoolbox.irp.ErrorStrategy.java
License:BSD License
/**
 * Make sure we don't attempt to recover from problems in subrules.
 *
 * @param recognizer
 */
@Override
public void sync(Parser recognizer) {
    ATNState s = recognizer.getInterpreter().atn.states.get(recognizer.getState());
    // System.err.println("sync @ "+s.stateNumber+"="+s.getClass().getSimpleName());
    // If already recovering, don't try to sync
    if (inErrorRecoveryMode(recognizer)) {
        return;
    }
    TokenStream tokens = recognizer.getInputStream();
    int la = tokens.LA(1);
    // try cheaper subset first; might get lucky. seems to shave a wee bit off
    if (recognizer.getATN().nextTokens(s).contains(la) || la == Token.EOF)
        return;
    // Return but don't end recovery. only do that upon valid token match
    if (recognizer.isExpectedToken(la)) {
        return;
    }
    switch (s.getStateType()) {
    /*case ATNState.BLOCK_START:
    case ATNState.STAR_BLOCK_START:
    case ATNState.PLUS_BLOCK_START:
    case ATNState.STAR_LOOP_ENTRY:
        // report error and recover if possible
        if (singleTokenDeletion(recognizer) != null) {
            return;
        }
        //throw new InputMismatchException(recognizer);
    */
    case ATNState.PLUS_LOOP_BACK:
    case ATNState.STAR_LOOP_BACK:
        // System.err.println("at loop back: "+s.getClass().getSimpleName());
        reportUnwantedToken(recognizer);
        /*IntervalSet expecting = recognizer.getExpectedTokens();
        IntervalSet whatFollowsLoopIterationOrRule = expecting.or(getErrorRecoverySet(recognizer));
        consumeUntil(recognizer, whatFollowsLoopIterationOrRule);*/
        break;
    default:
        // do nothing if we can't identify the exact kind of ATN state
        break;
    }
}
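The la == Token.EOF test above keeps sync() from trying to resynchronize once the input is exhausted. A minimal hypothetical variant of the same guard, built on the stock DefaultErrorStrategy rather than the harctoolbox class:

import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.Token;

// Skips synchronization entirely when the lookahead is EOF, so recovery never tries
// to consume past the end of input; otherwise falls back to the default behaviour.
public class EofAwareErrorStrategy extends DefaultErrorStrategy {
    @Override
    public void sync(Parser recognizer) {
        int la = recognizer.getInputStream().LA(1);
        if (la == Token.EOF) {
            return;              // nothing left to resynchronize on
        }
        super.sync(recognizer);
    }
}

It would be installed with parser.setErrorHandler(new EofAwareErrorStrategy()).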