Usage examples for org.antlr.v4.runtime.Token#getCharPositionInLine()
int getCharPositionInLine();
From source file:org.eclipse.titan.common.parsers.ParserLogger.java
License:Open Source License
/** * Token info in string format for logging purpose * @param aToken token/*from ww w. j av a 2s. co m*/ * @param aTokenNameResolver resolver to get token name * @return <token name>: '<token text>', @<token index>, <line>:<column>[, channel=<channel>] * <br>where * <br><token index> starts from 0, * <br><line> starts from 1, * <br><column> starts from 0, * <br>channel info is provided if <channel> > 0 (hidden channel) */ private static String getTokenInfo(final Token aToken, final TokenNameResolver aTokenNameResolver) { final StringBuilder sb = new StringBuilder(); final int tokenType = aToken.getType(); final String tokenName = getTokenName(tokenType, aTokenNameResolver); sb.append(tokenName); sb.append(": "); sb.append("'"); sb.append(getEscapedTokenText(aToken)); sb.append("'"); sb.append(", @" + aToken.getTokenIndex()); sb.append(", " + aToken.getLine() + ":" + aToken.getCharPositionInLine()); if (aToken.getChannel() > 0) { sb.append(", channel="); sb.append(aToken.getChannel()); } return sb.toString(); }
From source file:org.eclipse.titan.designer.parsers.asn1parser.BlockLevelTokenStreamTracker.java
License:Open Source License
/**
 * Collects the tokens of one block (between BEGINCHAR and ENDCHAR) from
 * {@code oldList} into a single synthetic BLOCK token, tracking nesting depth.
 * Tokens whose type is in {@code discardMask} are not added to the sub-token list.
 *
 * @param first the token that opened the block; its position seeds the result token
 * @return true if the token list ran out before the block closed ("out of bound"),
 *         false if a matching close was found
 */
private boolean getBlock(Token first) { // return true if it were out of bond
	if (index >= oldList.size()) {
		// Nothing left to read: keep the opening token as-is.
		tokens.add(first);
		return true;
	}
	TokenWithIndexAndSubTokens result;
	Token t = oldList.get(index++);
	List<Token> tokenList = new ArrayList<Token>();
	int nofUnclosedParanthesis = 1; // the block opener itself is already consumed
	while (t != null && t.getType() != Token.EOF && index < oldList.size()) {
		if (t.getType() == Asn1Lexer.BEGINCHAR) {
			nofUnclosedParanthesis++;
		} else if (t.getType() == Asn1Lexer.ENDCHAR) {
			nofUnclosedParanthesis--;
			if (nofUnclosedParanthesis == 0) {
				// Matching close found: wrap everything collected so far into one BLOCK token.
				result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
				result.setCharPositionInLine(first.getCharPositionInLine());
				result.setLine(first.getLine());
				// NOTE(review): start index is taken from first's STOP index — presumably
				// the block content starts right after the opener; confirm intent.
				result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
				result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
				result.setText(makeString(tokenList));
				tokens.add(result);
				return false;
			}
		}
		if (!discardMask.contains(Integer.valueOf(t.getType()))) {
			tokenList.add(t);
		}
		t = oldList.get(index++);
	}
	// EOF or end of list before the block closed: emit what was collected anyway.
	result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
	result.setCharPositionInLine(first.getCharPositionInLine());
	result.setLine(first.getLine());
	result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
	if (t != null) {
		result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
	}
	tokens.add(result);
	return true;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.ModuleLevelTokenStreamTracker.java
License:Open Source License
/**
 * Collects the tokens of one block (between BEGINCHAR and ENDCHAR), reading
 * directly from the token source, into a single synthetic BLOCK token while
 * tracking nesting depth. Tokens whose type is in {@code discardMask} are not
 * added to the sub-token list; kept tokens are wrapped in
 * {@code TokenWithIndexAndSubTokens}.
 *
 * @param first the token that opened the block; its position seeds the result token
 * @return true if EOF was hit before the block closed, false otherwise
 */
private boolean getBlock(Token first) { // return true if EOF hit
	Token t;
	TokenWithIndexAndSubTokens result;
	t = getTokenSource().nextToken();
	if (t instanceof WritableToken) {
		// Keep the token index consistent with the stream built so far.
		((WritableToken) t).setTokenIndex(tokens.size());
	}
	List<Token> tokenList = new ArrayList<Token>();
	int nofUnclosedParanthesis = 1; // the block opener itself is already consumed
	while (t != null && t.getType() != Token.EOF) {
		if (t.getType() == Asn1Lexer.BEGINCHAR) {
			nofUnclosedParanthesis++;
		} else if (t.getType() == Asn1Lexer.ENDCHAR) {
			nofUnclosedParanthesis--;
			if (nofUnclosedParanthesis == 0) {
				// Matching close found: wrap everything collected so far into one BLOCK token.
				result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
				result.setCharPositionInLine(first.getCharPositionInLine());
				result.setLine(first.getLine());
				// NOTE(review): start index is taken from first's STOP index — presumably
				// the block content starts right after the opener; confirm intent.
				result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
				result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
				result.setText(makeString(tokenList));
				tokens.add(result);
				return false;
			}
		}
		if (!discardMask.contains(Integer.valueOf(t.getType()))) {
			tokenList.add(new TokenWithIndexAndSubTokens(t));
		}
		t = getTokenSource().nextToken();
	}
	// EOF before the block closed: emit what was collected anyway.
	result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
	result.setCharPositionInLine(first.getCharPositionInLine());
	result.setLine(first.getLine());
	result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
	if (t != null) {
		result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
	}
	tokens.add(result);
	return true;
}
From source file:org.eclipse.titan.designer.parsers.asn1parser.SyntaxLevelTokenStreamTracker.java
License:Open Source License
/**
 * Collects the tokens of one bracketed block (between SQUAREOPEN and
 * SQUARECLOSE) from {@code oldList} into a single synthetic BLOCK token,
 * tracking nesting depth. Tokens whose type is in {@code discardMask} are not
 * added to the sub-token list.
 *
 * @param first the token that opened the block; its position seeds the result token
 * @return true if the token list ran out before the block closed, false otherwise
 */
private boolean getBlock(Token first) {
	if (index >= oldList.size()) {
		// Nothing left to read: keep the opening token as-is.
		tokens.add(first);
		return true;
	}
	TokenWithIndexAndSubTokens result;
	Token t = oldList.get(index++);
	List<Token> tokenList = new ArrayList<Token>();
	int nofUnclosedParanthesis = 1; // the block opener itself is already consumed
	while (t != null && t.getType() != Token.EOF && index < oldList.size()) {
		if (t.getType() == Asn1Lexer.SQUAREOPEN) {
			nofUnclosedParanthesis++;
		} else if (t.getType() == Asn1Lexer.SQUARECLOSE) {
			nofUnclosedParanthesis--;
			if (nofUnclosedParanthesis == 0) {
				// Matching close found: wrap everything collected so far into one BLOCK token.
				result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
				result.setCharPositionInLine(first.getCharPositionInLine());
				result.setLine(first.getLine());
				// NOTE(review): start index is taken from first's STOP index — presumably
				// the block content starts right after the opener; confirm intent.
				result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
				result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
				result.setText(makeString(tokenList));
				tokens.add(result);
				return false;
			}
		}
		if (!discardMask.contains(Integer.valueOf(t.getType()))) {
			tokenList.add(t);
		}
		t = oldList.get(index++);
	}
	// EOF or end of list before the block closed: emit what was collected anyway.
	result = new TokenWithIndexAndSubTokens(Asn1Lexer.BLOCK, tokenList, sourceFile);
	result.setCharPositionInLine(first.getCharPositionInLine());
	result.setLine(first.getLine());
	result.setStartIndex(((TokenWithIndexAndSubTokens) first).getStopIndex());
	if (t != null) {
		result.setStopIndex(((TokenWithIndexAndSubTokens) t).getStopIndex());
	}
	tokens.add(result);
	return true;
}
From source file:org.eclipse.titan.designer.parsers.ttcn3parser.ConditionalTransition.java
License:Open Source License
/**
 * Fetches up to {@code n} tokens into the token buffer, driving TTCN-3
 * preprocessing along the way: PREPROCESSOR_DIRECTIVE tokens are parsed with
 * a dedicated lexer/parser pair; conditional directives (#if/#else/#endif
 * family) toggle {@code condStateStack} and record inactive code regions;
 * #include pushes a new token stream onto {@code tokenStreamStack} (bounded
 * by RECURSION_LIMIT); #error/#warning and ignored directives become
 * TITANMarkers. Ordinary tokens are buffered only while the conditional
 * state is "passing". EOF of an included file pops the stream stack; EOF of
 * the outermost stream sets {@code fetchedEOF}.
 *
 * NOTE(review): this extract appears to be scraped text — the string literal
 * "Maximum #include recursion depth reached" is broken across a line break
 * below and would not compile as-is; confirm against the original source file.
 *
 * @param n the number of tokens requested
 * @return the number of tokens actually fetched (0 once EOF has been reached)
 */
@Override public int fetch(int n) { if (fetchedEOF) { return 0; } int i = 0; do { Token t; if (tokenStreamStack.isEmpty()) { t = getTokenSource().nextToken(); } else { t = tokenStreamStack.peek().getTokenSource().nextToken(); } if (t == null) { return 0; } int tokenType = t.getType(); if (tokenType == Ttcn3Lexer.PREPROCESSOR_DIRECTIVE) { lastPPDirectiveLocation = new Location(actualFile, t.getLine(), t.getStartIndex(), t.getStopIndex() + 1); // 1. the first # shall be discarded // 2. "\\\n" strings are removed, so multiline tokens, which are split by backslash are extracted to one line final String text = t.getText().substring(1).replace("\\\n", ""); Reader reader = new StringReader(text); CharStream charStream = new UnbufferedCharStream(reader); PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream); lexer.setTokenFactory(new PPDirectiveTokenFactory(true, t)); lexerListener = new PPListener(); lexer.removeErrorListeners(); lexer.addErrorListener(lexerListener); lexer.setLine(t.getLine()); lexer.setCharPositionInLine(t.getCharPositionInLine()); // 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream, because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug. // Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU // pr_PatternChunk[StringBuilder builder, boolean[] uni]: // $builder.append($v.text); <-- exception is thrown here: java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341 // 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with "-> channel(HIDDEN)" are not filtered out in lexer. 
final CommonTokenStream tokenStream = new CommonTokenStream(lexer); PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser(tokenStream); localParser.setBuildParseTree(false); parserListener = new PPListener(localParser); localParser.removeErrorListeners(); localParser.addErrorListener(parserListener); localParser.setIsActiveCode(condStateStack.isPassing()); localParser.setMacros(macros); localParser.setLine(t.getLine()); PreprocessorDirective ppDirective = null; ppDirective = localParser.pr_Directive().ppDirective; errorsStored.addAll(localParser.getErrorStorage()); warnings.addAll(localParser.getWarnings()); unsupportedConstructs.addAll(localParser.getUnsupportedConstructs()); if (ppDirective != null) { ppDirective.line = t.getLine(); if (ppDirective.isConditional()) { boolean preIsPassing = condStateStack.isPassing(); condStateStack.processDirective(ppDirective); boolean postIsPassing = condStateStack.isPassing(); if (preIsPassing != postIsPassing && tokenStreamStack.isEmpty() && getTokenSource() instanceof Ttcn3Lexer) { // included files are ignored because of ambiguity Location ppLocation = lastPPDirectiveLocation; if (ppLocation != null) { if (preIsPassing) { // switched to inactive: begin a new inactive location Location loc = new Location(actualFile, ppLocation.getLine(), ppLocation.getEndOffset(), ppLocation.getEndOffset()); inactiveCodeLocations.add(loc); } else { // switched to active: end the current inactive location int iclSize = inactiveCodeLocations.size(); if (iclSize > 0) { Location lastLocation = inactiveCodeLocations.get(iclSize - 1); lastLocation.setEndOffset(ppLocation.getOffset()); } } } } } else { // other directive types if (condStateStack.isPassing()) { // do something with the // directive switch (ppDirective.type) { case INCLUDE: { if (tokenStreamStack.size() > RECURSION_LIMIT) { // dumb but safe defense against infinite recursion, default value from gcc TITANMarker marker = new TITANMarker("Maximum #include recursion 
depth reached", ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL); unsupportedConstructs.add(marker); } else { //TODO: Makes the Eclipse slow down processIncludeDirective(ppDirective); } } break; case ERROR: { String errorMessage = ppDirective.str == null ? "" : ppDirective.str; TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL); unsupportedConstructs.add(marker); } break; case WARNING: { String warningMessage = ppDirective.str == null ? "" : ppDirective.str; TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1, IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL); warnings.add(marker); } break; case LINECONTROL: case LINEMARKER: case PRAGMA: case NULL: { String reportPreference = Platform.getPreferencesService().getString( ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.REPORT_IGNORED_PREPROCESSOR_DIRECTIVES, GeneralConstants.WARNING, null); if (!GeneralConstants.IGNORE.equals(reportPreference)) { boolean isError = GeneralConstants.ERROR.equals(reportPreference); TITANMarker marker = new TITANMarker( MessageFormat.format("Preprocessor directive {0} is ignored", ppDirective.type.getName()), ppDirective.line, -1, -1, isError ? 
IMarker.SEVERITY_ERROR : IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL); if (isError) { unsupportedConstructs.add(marker); } else { warnings.add(marker); } } } break; default: // ignore } } } } } else if (tokenType == Token.EOF) { if (!tokenStreamStack.isEmpty()) { // the included file ended, drop lexer // from the stack and ignore EOF token TokenStreamData tsd = tokenStreamStack.pop(); if (parser != null) { if (tokenStreamStack.isEmpty()) { parser.setActualFile(actualFile); parser.setLexer(actualLexer); } else { parser.setActualFile(tokenStreamStack.peek().file); parser.setLexer(tokenStreamStack.peek().lexer); } } if (tsd.reader != null) { try { tsd.reader.close(); } catch (IOException e) { } } } else { fetchedEOF = true; condStateStack.eofCheck(); tokens.add(t); ((CommonToken) t).setTokenIndex(tokens.size() - 1); --n; ++i; if (n == 0) { return i; } } } else { if (condStateStack.isPassing()) { tokens.add(t); ((CommonToken) t).setTokenIndex(tokens.size() - 1); --n; ++i; if (n == 0) { return i; } } } } while (true); }
From source file:org.elasticsearch.painless.ParserErrorStrategy.java
License:Apache License
@Override public void recover(Parser recognizer, RecognitionException re) { Token token = re.getOffendingToken(); String message;//from w ww. j ava 2 s . c om if (token == null) { message = "Error: no parse token found."; } else if (re instanceof InputMismatchException) { message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + " unexpected token [" + getTokenErrorDisplay(token) + "]" + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; } else if (re instanceof NoViableAltException) { if (token.getType() == PainlessParser.EOF) { message = "Error: unexpected end of script."; } else { message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + "invalid sequence of tokens near [" + getTokenErrorDisplay(token) + "]."; } } else { message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + " unexpected token near [" + getTokenErrorDisplay(token) + "]."; } ParseException parseException = new ParseException(message, token == null ? -1 : token.getStartIndex()); parseException.initCause(re); throw new RuntimeException(parseException); }
From source file:org.elasticsearch.painless.ParserErrorStrategy.java
License:Apache License
/**
 * Disables ANTLR's single-token inline recovery: builds an
 * "Error[line:column]" message for the current token and aborts parsing by
 * throwing a {@code RuntimeException} wrapping a {@code ParseException}.
 */
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
    final Token offender = recognizer.getCurrentToken();
    final String expected = recognizer.getExpectedTokens().toString(recognizer.getVocabulary());
    final String message = "Error[" + offender.getLine() + ":" + offender.getCharPositionInLine() + "]:"
            + " unexpected token [" + getTokenErrorDisplay(offender) + "]"
            + " was expecting one of [" + expected + "].";
    throw new RuntimeException(new ParseException(message, offender.getStartIndex()));
}
From source file:org.elasticsearch.plan.a.ParserErrorStrategy.java
License:Apache License
/**
 * Translates an ANTLR recognition error into a {@code ParseException} with a
 * human-readable "Error[line:column]" message, then aborts parsing by throwing
 * a {@code RuntimeException} wrapping it. No recovery is attempted.
 */
@Override
public void recover(Parser recognizer, RecognitionException re) {
    final Token offender = re.getOffendingToken();
    final String message;
    if (offender == null) {
        message = "Error: no parse token found.";
    } else if (re instanceof InputMismatchException) {
        final String position = "Error[" + offender.getLine() + ":" + offender.getCharPositionInLine() + "]:";
        message = position + " unexpected token [" + getTokenErrorDisplay(offender) + "]"
                + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
    } else if (re instanceof NoViableAltException) {
        if (offender.getType() == PlanAParser.EOF) {
            message = "Error: unexpected end of script.";
        } else {
            message = "Error[" + offender.getLine() + ":" + offender.getCharPositionInLine() + "]:"
                    + "invalid sequence of tokens near [" + getTokenErrorDisplay(offender) + "].";
        }
    } else {
        message = "Error[" + offender.getLine() + ":" + offender.getCharPositionInLine() + "]:"
                + " unexpected token near [" + getTokenErrorDisplay(offender) + "].";
    }
    // Preserve the original recognition exception as the cause chain.
    final ParseException parseException = new ParseException(message,
            offender == null ? -1 : offender.getStartIndex());
    parseException.initCause(re);
    throw new RuntimeException(parseException);
}
From source file:org.elasticsearch.xpack.sql.parser.AbstractBuilder.java
License:Open Source License
/**
 * Builds a {@code Location} from a token's line and column.
 *
 * @param token the token to read position information from; must not be null
 * @return the location of the token (line starts at 1, column at 0)
 */
static Location source(Token token) {
    Check.notNull(token, "token is null");
    final int line = token.getLine();
    final int column = token.getCharPositionInLine();
    return new Location(line, column);
}
From source file:org.flightgear.clgen.listener.ErrorListener.java
License:Open Source License
/**
 * Reports a semantic error to stderr: the message with its line number,
 * followed by the error-context line for the token's position.
 */
@Override
public void semanticError(final ParseTreeListener listener, final Token token, final String msg) {
    final int line = token.getLine();
    System.err.format("error at line %d: %s\n", line, msg);
    System.err.println(errorContext(line, token.getCharPositionInLine()));
}