Example usage for org.antlr.v4.runtime Token getText

List of usage examples for org.antlr.v4.runtime Token getText

Introduction

On this page you can find example usages of org.antlr.v4.runtime Token getText.

Prototype

String getText();

Source Link

Document

Get the text of the token.

Usage

From source file:org.eclipse.titan.designer.parsers.asn1parser.BlockLevelTokenStreamTracker.java

License:Open Source License

/**
 * Concatenates the text of every token in the given list, in order,
 * into a single string with no separators.
 */
private String makeString(List<Token> list) {
    final StringBuilder joined = new StringBuilder();
    list.forEach(token -> joined.append(token.getText()));
    return joined.toString();
}

From source file:org.eclipse.titan.designer.parsers.ttcn3parser.ConditionalTransition.java

License:Open Source License

/**
 * Fetches up to {@code n} tokens from the current token source into the buffer,
 * transparently handling TTCN-3 preprocessor directives: conditional compilation
 * (#if/#else/#endif via condStateStack), #include nesting (tokenStreamStack), and
 * reporting of #error/#warning/ignored directives.
 *
 * @param n the number of tokens requested
 * @return the number of tokens actually added to the buffer; 0 if EOF was already
 *         fetched or the token source is exhausted
 */
@Override
public int fetch(int n) {
    if (fetchedEOF) {
        return 0;
    }
    // i counts tokens actually appended to the buffer in this call.
    int i = 0;
    do {
        Token t;
        // Read from the innermost included file if any, otherwise the root lexer.
        if (tokenStreamStack.isEmpty()) {
            t = getTokenSource().nextToken();
        } else {
            t = tokenStreamStack.peek().getTokenSource().nextToken();
        }
        if (t == null) {
            return 0;
        }
        int tokenType = t.getType();
        if (tokenType == Ttcn3Lexer.PREPROCESSOR_DIRECTIVE) {
            lastPPDirectiveLocation = new Location(actualFile, t.getLine(), t.getStartIndex(),
                    t.getStopIndex() + 1);
            // 1. the first # shall be discarded
            // 2. "\\\n" strings are removed, so multiline tokens, which are split by backslash are extracted to one line
            final String text = t.getText().substring(1).replace("\\\n", "");
            Reader reader = new StringReader(text);
            CharStream charStream = new UnbufferedCharStream(reader);
            PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream);
            lexer.setTokenFactory(new PPDirectiveTokenFactory(true, t));
            lexerListener = new PPListener();
            lexer.removeErrorListeners();
            lexer.addErrorListener(lexerListener);
            // Keep line/column of the nested lexer aligned with the directive's
            // position in the enclosing file so error locations are correct.
            lexer.setLine(t.getLine());
            lexer.setCharPositionInLine(t.getCharPositionInLine());

            // 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream, because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
            // Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU
            // pr_PatternChunk[StringBuilder builder, boolean[] uni]:
            //   $builder.append($v.text); <-- exception is thrown here: java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341
            // 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with "-> channel(HIDDEN)" are not filtered out in lexer.
            final CommonTokenStream tokenStream = new CommonTokenStream(lexer);

            PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser(tokenStream);
            localParser.setBuildParseTree(false);
            parserListener = new PPListener(localParser);
            localParser.removeErrorListeners();
            localParser.addErrorListener(parserListener);
            localParser.setIsActiveCode(condStateStack.isPassing());
            localParser.setMacros(macros);
            localParser.setLine(t.getLine());
            PreprocessorDirective ppDirective = null;
            ppDirective = localParser.pr_Directive().ppDirective;
            // Collect whatever the nested parse produced, even if the directive itself is null.
            errorsStored.addAll(localParser.getErrorStorage());
            warnings.addAll(localParser.getWarnings());
            unsupportedConstructs.addAll(localParser.getUnsupportedConstructs());
            if (ppDirective != null) {
                ppDirective.line = t.getLine();
                if (ppDirective.isConditional()) {
                    // Track active/inactive transitions to record inactive code regions.
                    boolean preIsPassing = condStateStack.isPassing();
                    condStateStack.processDirective(ppDirective);
                    boolean postIsPassing = condStateStack.isPassing();
                    if (preIsPassing != postIsPassing && tokenStreamStack.isEmpty()
                            && getTokenSource() instanceof Ttcn3Lexer) {
                        // included files are ignored because of ambiguity
                        Location ppLocation = lastPPDirectiveLocation;
                        if (ppLocation != null) {
                            if (preIsPassing) {
                                // switched to inactive: begin a new inactive location
                                Location loc = new Location(actualFile, ppLocation.getLine(),
                                        ppLocation.getEndOffset(), ppLocation.getEndOffset());
                                inactiveCodeLocations.add(loc);
                            } else {
                                // switched to active: end the current inactive location
                                int iclSize = inactiveCodeLocations.size();
                                if (iclSize > 0) {
                                    Location lastLocation = inactiveCodeLocations.get(iclSize - 1);
                                    lastLocation.setEndOffset(ppLocation.getOffset());
                                }
                            }
                        }
                    }
                } else {
                    // other directive types
                    if (condStateStack.isPassing()) {
                        // do something with the
                        // directive
                        switch (ppDirective.type) {
                        case INCLUDE: {
                            if (tokenStreamStack.size() > RECURSION_LIMIT) {
                                // dumb but safe defense against infinite recursion, default value from gcc
                                TITANMarker marker = new TITANMarker("Maximum #include recursion depth reached",
                                        ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR,
                                        IMarker.PRIORITY_NORMAL);
                                unsupportedConstructs.add(marker);
                            } else {
                                //TODO: Makes the Eclipse slow down
                                processIncludeDirective(ppDirective);
                            }
                        }
                            break;
                        case ERROR: {
                            String errorMessage = ppDirective.str == null ? "" : ppDirective.str;
                            TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1,
                                    IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
                            unsupportedConstructs.add(marker);
                        }
                            break;
                        case WARNING: {
                            String warningMessage = ppDirective.str == null ? "" : ppDirective.str;
                            TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1,
                                    IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL);
                            warnings.add(marker);
                        }
                            break;
                        case LINECONTROL:
                        case LINEMARKER:
                        case PRAGMA:
                        case NULL: {
                            // Directives TITAN does not support: report per user preference
                            // (error / warning / ignore).
                            String reportPreference = Platform.getPreferencesService().getString(
                                    ProductConstants.PRODUCT_ID_DESIGNER,
                                    PreferenceConstants.REPORT_IGNORED_PREPROCESSOR_DIRECTIVES,
                                    GeneralConstants.WARNING, null);
                            if (!GeneralConstants.IGNORE.equals(reportPreference)) {
                                boolean isError = GeneralConstants.ERROR.equals(reportPreference);
                                TITANMarker marker = new TITANMarker(
                                        MessageFormat.format("Preprocessor directive {0} is ignored",
                                                ppDirective.type.getName()),
                                        ppDirective.line, -1, -1,
                                        isError ? IMarker.SEVERITY_ERROR : IMarker.SEVERITY_WARNING,
                                        IMarker.PRIORITY_NORMAL);
                                if (isError) {
                                    unsupportedConstructs.add(marker);
                                } else {
                                    warnings.add(marker);
                                }
                            }
                        }
                            break;
                        default:
                            // ignore
                        }
                    }
                }
            }
        } else if (tokenType == Token.EOF) {
            if (!tokenStreamStack.isEmpty()) {
                // the included file ended, drop lexer
                // from the stack and ignore EOF token
                TokenStreamData tsd = tokenStreamStack.pop();
                if (parser != null) {
                    // Restore the parser's view to the file we are returning into.
                    if (tokenStreamStack.isEmpty()) {
                        parser.setActualFile(actualFile);
                        parser.setLexer(actualLexer);
                    } else {
                        parser.setActualFile(tokenStreamStack.peek().file);
                        parser.setLexer(tokenStreamStack.peek().lexer);
                    }
                }
                if (tsd.reader != null) {
                    try {
                        tsd.reader.close();
                    } catch (IOException e) {
                        // best-effort close of the finished include file; failure is
                        // deliberately ignored as there is nothing useful to do here
                    }
                }
            } else {
                // EOF of the root file: buffer the EOF token itself and stop.
                fetchedEOF = true;
                condStateStack.eofCheck();
                tokens.add(t);
                ((CommonToken) t).setTokenIndex(tokens.size() - 1);
                --n;
                ++i;
                if (n == 0) {
                    return i;
                }
            }
        } else {
            // Ordinary token: keep it only when the conditional state is active.
            if (condStateStack.isPassing()) {
                tokens.add(t);
                ((CommonToken) t).setTokenIndex(tokens.size() - 1);
                --n;
                ++i;
                if (n == 0) {
                    return i;
                }
            }
        }
    } while (true);
}

From source file:org.elasticsearch.xpack.sql.parser.LogicalPlanBuilder.java

License:Open Source License

/**
 * Builds the logical plan for a query body without a WITH clause: the inner
 * query term, wrapped by an optional ORDER BY and an optional LIMIT.
 */
@Override
public LogicalPlan visitQueryNoWith(QueryNoWithContext ctx) {
    LogicalPlan result = plan(ctx.queryTerm());

    // Wrap with ORDER BY when any ordering expressions are present.
    if (ctx.orderBy().isEmpty() == false) {
        result = new OrderBy(source(ctx.ORDER()), result, visitList(ctx.orderBy(), Order.class));
    }

    // Wrap with LIMIT when an integer limit literal is present.
    LimitClauseContext limits = ctx.limitClause();
    if (limits != null) {
        Token limitToken = limits.limit;
        if (limitToken != null && limits.INTEGER_VALUE() != null) {
            int count = Integer.parseInt(limitToken.getText());
            result = new Limit(source(limits), new Literal(source(limits), count, DataType.INTEGER), result);
        }
    }

    return result;
}

From source file:org.elasticsearch.xpack.sql.parser.SqlParser.java

License:Open Source License

/**
 * Lexes and parses the given SQL string, then applies {@code visitor} to the
 * resulting parse tree to build the typed result.
 *
 * @param sql           the SQL text to parse
 * @param params        positional typed parameters to associate with parameter tokens
 * @param parseFunction selects which grammar rule to invoke on the parser
 * @param visitor       converts the parse tree (via an AstBuilder) into the result
 * @return the value produced by {@code visitor}
 */
private <T> T invokeParser(String sql, List<SqlTypedParamValue> params,
        Function<SqlBaseParser, ParserRuleContext> parseFunction,
        BiFunction<AstBuilder, ParserRuleContext, T> visitor) {
    SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql));

    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);

    // Maps each parameter placeholder token to its typed value for the AstBuilder.
    Map<Token, SqlTypedParamValue> paramTokens = new HashMap<>();
    TokenSource tokenSource = new ParametrizedTokenSource(lexer, paramTokens, params);

    CommonTokenStream tokenStream = new CommonTokenStream(tokenSource);
    SqlBaseParser parser = new SqlBaseParser(tokenStream);

    parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);

    // SLL prediction is faster than the default and sufficient for this grammar.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);

    if (DEBUG) {
        debug(parser);
        tokenStream.fill();

        for (Token t : tokenStream.getTokens()) {
            String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
            String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
            log.info(format(Locale.ROOT, "  %-15s '%s'", symbolicName == null ? literalName : symbolicName,
                    t.getText()));
        }
    }

    ParserRuleContext tree = parseFunction.apply(parser);

    if (DEBUG) {
        // Fixed: pass the tree as a logger argument so the "{}" placeholder is
        // substituted (it was previously concatenated, printing a literal "{}").
        log.info("Parse tree {}", tree.toStringTree());
    }

    return visitor.apply(new AstBuilder(paramTokens), tree);
}

From source file:org.flightgear.clgen.listener.ItemListener.java

License:Open Source License

/**
 * Resolves an alias token against the symbol table of the current item.
 * Reports a parse error and returns null when the alias is not defined.
 */
private Symbol lookup(final Token token) {
    final String alias = token.getText();
    final Symbol resolved = symbolTable.lookup(item.getName(), alias);
    if (resolved != null) {
        return resolved;
    }
    error(token, "Alias '%s' is not defined in item '%s'", alias, item.getName());
    return null;
}

From source file:org.hibernate.sqm.parser.hql.internal.HqlParseTreeBuilder.java

License:Apache License

/**
 * Builds an HqlParser for the given HQL string. The returned parser overrides
 * the reserved-word hook to log at debug level; the parse tree is also logged
 * via HqlParseTreePrinter before the parser is returned.
 *
 * @param hql the HQL query text
 * @return a parser ready to run over the lexed input
 */
public HqlParser parseHql(String hql) {
    // Build the lexer
    HqlLexer hqlLexer = new HqlLexer(new ANTLRInputStream(hql));

    // Build the parser...
    final HqlParser parser = new HqlParser(new CommonTokenStream(hqlLexer)) {
        @Override
        protected void logUseOfReservedWordAsIdentifier(Token token) {
            // Use a printf-style argument so the message is only rendered when
            // debug logging is enabled (the previous "+" concatenation built the
            // string unconditionally, defeating debugf's lazy formatting).
            log.debugf("Encountered use of reserved word as identifier : %s", token.getText());
        }
    };

    HqlParseTreePrinter.logParseTree(parser);

    return parser;
}

From source file:org.hibernate.sqm.parser.hql.internal.SemanticQueryBuilder.java

License:Apache License

/**
 * Determines the result alias for a select item: a generated implicit alias
 * when no identifier context is given, the raw context text when there is no
 * AS keyword, otherwise the explicit identifier — rejecting reserved words
 * used as aliases under strict JPA compliance.
 */
private String interpretResultIdentifier(HqlParser.ResultIdentifierContext resultIdentifierContext) {
    if (resultIdentifierContext == null) {
        return parsingContext.getImplicitAliasGenerator().buildUniqueImplicitAlias();
    }

    if (resultIdentifierContext.AS() == null) {
        return resultIdentifierContext.getText();
    }

    final Token aliasToken = resultIdentifierContext.identifier().getStart();
    final String explicitAlias = aliasToken.getText();

    // A non-IDENTIFIER token here means a reserved word was used as the alias.
    if (aliasToken.getType() != HqlParser.IDENTIFIER
            && parsingContext.getConsumerContext().useStrictJpaCompliance()) {
        throw new StrictJpaComplianceViolation(
                String.format(Locale.ROOT, "Strict JPQL compliance was violated : %s [%s]",
                        StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS.description(),
                        explicitAlias),
                StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS);
    }

    return explicitAlias;
}

From source file:org.hibernate.sqm.parser.hql.internal.SemanticQueryBuilder.java

License:Apache License

/**
 * Determines the identification variable (alias) for a FROM-clause element:
 * a generated implicit alias when no definition is given, the IDENTIFIER text
 * when there is no AS keyword, otherwise the explicit identifier — rejecting
 * reserved words used as aliases under strict JPA compliance.
 */
private String interpretIdentificationVariable(
        HqlParser.IdentificationVariableDefContext identificationVariableDef) {
    if (identificationVariableDef == null) {
        return parsingContext.getImplicitAliasGenerator().buildUniqueImplicitAlias();
    }

    if (identificationVariableDef.AS() == null) {
        return identificationVariableDef.IDENTIFIER().getText();
    }

    final Token aliasToken = identificationVariableDef.identificationVariable()
            .identifier().getStart();

    // A non-IDENTIFIER token here means a reserved word was used as the alias.
    if (aliasToken.getType() != HqlParser.IDENTIFIER
            && parsingContext.getConsumerContext().useStrictJpaCompliance()) {
        throw new StrictJpaComplianceViolation(
                String.format(Locale.ROOT, "Strict JPQL compliance was violated : %s [%s]",
                        StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS.description(),
                        aliasToken.getText()),
                StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS);
    }

    return aliasToken.getText();
}

From source file:org.kaazing.k3po.lang.internal.parser.ScriptParserImpl.java

License:Open Source License

/**
 * Converts an ANTLR recognition error into a ScriptParseException, delegating
 * to the NoViableAltException-specific overload when applicable.
 *
 * @param parser the parser that raised the error (used by the overload)
 * @param re     the recognition error
 * @return a ScriptParseException describing the offending token
 */
private ScriptParseException createScriptParseException(RobotParser parser, RecognitionException re) {

    if (re instanceof NoViableAltException) {
        return createScriptParseException(parser, (NoViableAltException) re);
    }

    Token token = re.getOffendingToken();
    String tokenText = token.getText();

    // Removed dead code: a "desc" string and an "unexpectedTokenName" lookup
    // (previously kept alive via @SuppressWarnings("unused")) were computed
    // but never used in the resulting message.
    String msg;
    if (tokenText == null) {
        // A null offending-token text indicates the input ended unexpectedly.
        msg = "error: end of input";
    } else {
        msg = format("error: unexpected keyword '%s'", tokenText);
    }

    return new ScriptParseException(msg, re);
}

From source file:org.kaazing.k3po.lang.internal.regex.NamedGroupPattern.java

License:Open Source License

/**
 * Compiles a regular expression that may contain named capture groups,
 * collecting the group names while parsing.
 *
 * @param regexWithGroupNames the pattern text, possibly with named groups
 * @return the compiled pattern together with its ordered group names
 * @throws PatternSyntaxException if the input cannot be read or parsed
 */
public static NamedGroupPattern compile(final String regexWithGroupNames) {
    try {
        ByteArrayInputStream input = new ByteArrayInputStream(regexWithGroupNames.getBytes(UTF_8));
        CharStream ais = new ANTLRInputStream(input);
        Lexer lexer = new RegexLexer(ais);
        TokenStream tokens = new CommonTokenStream(lexer);
        RegexParser parser = new RegexParser(tokens);
        // Fail fast on the first syntax error instead of attempting recovery.
        parser.setErrorHandler(new BailErrorStrategy());
        final List<String> groupNames = new ArrayList<>();
        parser.addParseListener(new RegexBaseListener() {
            @Override
            public void exitGroupN(GroupNContext ctx) {
                Token captureVar = ctx.capture;
                // Not every entry in groupN populates groupNames
                if (captureVar != null) {
                    String capture = captureVar.getText();
                    // Strip the "(?<" prefix characters and trailing ">" delimiter.
                    String groupName = capture.substring(2, capture.length() - 1);
                    groupNames.add(groupName);
                }
            }
        });
        LiteralContext literal = parser.literal();
        String regex = literal.regex.getText();
        return new NamedGroupPattern(Pattern.compile(regex), groupNames);
    } catch (IOException ioe) {
        PatternSyntaxException pse = new PatternSyntaxException("I/O exception", regexWithGroupNames, 0);
        pse.initCause(ioe);
        throw pse;
    } catch (ParseCancellationException e) {
        // BailErrorStrategy wraps the underlying recognition error.
        Throwable cause = e.getCause();
        if (cause instanceof RecognitionException) {
            throw asSyntaxException((RecognitionException) cause, regexWithGroupNames);
        }
        throw e;
    } catch (RecognitionException re) {
        throw asSyntaxException(re, regexWithGroupNames);
    }
}

/**
 * Wraps an ANTLR recognition error as a PatternSyntaxException pointing at the
 * offending index, preserving the original error as the cause. (Extracted to
 * remove the duplicated construction in the two catch paths above.)
 */
private static PatternSyntaxException asSyntaxException(RecognitionException re, String pattern) {
    PatternSyntaxException pse = new PatternSyntaxException("Unexpected type", pattern,
            re.getInputStream().index());
    pse.initCause(re);
    return pse;
}