Example usage for org.antlr.v4.runtime CommonTokenStream getTokens

List of usage examples for org.antlr.v4.runtime CommonTokenStream getTokens

Introduction

In this page you can find the example usage for org.antlr.v4.runtime CommonTokenStream getTokens.

Prototype

public List<Token> getTokens() 

Source Link

Usage

From source file: org.elasticsearch.xpack.sql.parser.SqlParser.java

License: Open Source License

/**
 * Lexes and parses {@code sql}, then transforms the resulting parse tree into
 * the caller's desired representation.
 *
 * @param sql           the SQL text to parse
 * @param params        typed parameter values substituted for placeholder tokens
 * @param parseFunction the grammar entry rule to invoke on the parser
 * @param visitor       converts the parse tree (with its parameter-token map) into the result
 * @return the value produced by {@code visitor}
 */
private <T> T invokeParser(String sql, List<SqlTypedParamValue> params,
        Function<SqlBaseParser, ParserRuleContext> parseFunction,
        BiFunction<AstBuilder, ParserRuleContext, T> visitor) {
    SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql));

    // Replace the default console error listener with one that fails fast.
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);

    // Collects placeholder tokens -> typed values for the AST builder.
    Map<Token, SqlTypedParamValue> paramTokens = new HashMap<>();
    TokenSource tokenSource = new ParametrizedTokenSource(lexer, paramTokens, params);

    CommonTokenStream tokenStream = new CommonTokenStream(tokenSource);
    SqlBaseParser parser = new SqlBaseParser(tokenStream);

    parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);

    // SLL prediction is faster than full LL; sufficient for this grammar.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);

    if (DEBUG) {
        debug(parser);
        // Consume the whole input so getTokens() below sees every token.
        tokenStream.fill();

        for (Token t : tokenStream.getTokens()) {
            String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
            String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
            log.info(format(Locale.ROOT, "  %-15s '%s'", symbolicName == null ? literalName : symbolicName,
                    t.getText()));
        }
    }

    ParserRuleContext tree = parseFunction.apply(parser);

    if (DEBUG) {
        // Was: log.info("Parse tree {} " + tree.toStringTree()) -- the "{}"
        // placeholder was concatenated and printed literally. Use the
        // parameterized form instead.
        log.info("Parse tree {}", tree.toStringTree());
    }

    return visitor.apply(new AstBuilder(paramTokens), tree);
}

From source file: org.icroco.chat.parser.PrintSmartRfq.java

License: Apache License

/**
 * Lexes and parses a smart-RFQ string, printing the syntax-error count,
 * any collected error details, the full token list, and the parsed
 * instrument text to standard out.
 *
 * @param smartRfq the raw smart-RFQ text to parse
 */
public void parseSmartRfq(String smartRfq) {
    final SyntaxErrorListener syntaxErrorListener = new SyntaxErrorListener();

    // Lexer: route errors through our listener instead of the console.
    SmartRfqLexer lexer = new SmartRfqLexer(new ANTLRInputStream(smartRfq));
    lexer.removeErrorListeners();
    lexer.addErrorListener(syntaxErrorListener);

    // Buffer the matched tokens and feed them to the parser, which shares
    // the same error listener.
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SmartRfqParser parser = new SmartRfqParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(syntaxErrorListener);

    // Parse from the grammar's entry rule and report the outcome.
    final SmartRfqParser.SmartrfqContext smartrfq = parser.smartrfq();
    System.out.println("Errors Nb: " + parser.getNumberOfSyntaxErrors());
    System.out.println(syntaxErrorListener.toString());
    System.out.println("Tokens: " + tokens.getTokens());
    System.out.println("instr: " + smartrfq.instrument().getText());
    System.out.println("---");
}

From source file: org.semanticwb.rdf.sparql.SparqlMain.java

License: Apache License

/**
 * Demonstrates the SPARQL front end: lexes a hard-coded query, dumps the
 * token stream, reconstructs the input text from the tokens, parses it,
 * prints and inspects the parse tree, and -- when parsing succeeded --
 * loads a StringTemplate group file for a (currently disabled)
 * reformatting pipeline.
 *
 * @param args optional CLI arguments. NOTE(review): args[1] is parsed as an
 *             integer line width here but also used as the template group
 *             file path further down -- these two uses conflict (a path
 *             would fail Integer.parseInt); confirm the intended contract.
 * @throws Exception on unrecoverable parse or template-loading failures
 */
public static void main(String args[]) throws Exception {

    int lineWidth = 80;
    if (args.length >= 2) {
        lineWidth = Integer.parseInt(args[1]);
    }

    SparqlLexer lex = null;
    try {
        // Sample query; retained because the commented-out alternative below
        // (new ANTLRInputStream(q)) refers to it.
        String q = "PREFIX map: <http://datosabiertos.gob.mx/ontology/mapas.owl#>\n"
                + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" + "\n" + "SELECT \n"
                + "    (CONCAT(?descripcionRecursoGeografico,\" (\",?tituloCapa,\")\") as ?titulo)\n"
                + "    (CONCAT(\n" + "        \"<h2>\",?titulo,\"</h2>\",\n"
                + "        \"Estado:\",?estado,\"<br/>\",\n" + "        \"Colonia:\",?colonia,\"<br/>\",\n"
                + "        \"Calle:\",?calle,\"<br/>\",\n" + "        \"CP:\",?cp,\"<br/>\",\n"
                + "        \"(\",?latitud,\", \",?longitud,\")\"\n" + "    ) as ?descripcion) \n"
                + "    ?latitud \n" + "    ?longitud \n" + "WHERE {\n"
                + "    ?uri rdf:type map:RecursoGeografico .\n" + "    ?uri map:capa ?capa .\n"
                + "    ?capa map:titulo ?tituloCapa .\n"
                + "    ?uri map:descripcionRecursoGeografico ?descripcionRecursoGeografico .\n"
                + "    ?uri map:estado ?estado .\n" + "    ?uri map:colonia ?colonia .\n"
                + "    ?uri map:calle ?calle .\n" + "    ?uri map:cp ?cp .\n"
                + "    ?uri map:latitud ?latitud .\n" + "    ?uri map:longitud ?longitud .\n"
                + "    filter( (?latitud>\"19.2\"^^xsd:double)  &&  (?latitud<\"19.3\"^^xsd:double)  && (?longitud<\"-99.1\"^^xsd:double)  &&  (?longitud>\"-99.2\"^^xsd:double) ) .\n"
                + "}\n" + "LIMIT 100";
        //lex = new SparqlLexer(new ANTLRFileStream(args[0]));
        lex = new SparqlLexer(new ANTLRInputStream(
                "select (count(*) as ?c) ?s ?p ?o where {?s a ?o; hola:asd <http://sdf.ser:sadasd>. ?s ?p2 ?o2}"));
        //lex = new SparqlLexer(new ANTLRInputStream(q));

    } catch (Exception ex) {
        Logger.getLogger(SparqlMain.class.getName()).log(Level.SEVERE, null, ex);
    }

    if (lex == null) {
        // Lexer construction failed (already logged). Previously execution
        // fell through and crashed later with a NullPointerException.
        return;
    }

    CommonTokenStream tokens = new CommonTokenStream(lex);

    System.out.println("Tokens: -------------------------------");

    // Consume the entire input so getTokens() returns the complete list.
    tokens.fill();
    System.out.println("Number of tokens " + tokens.getTokens().size());

    List<Token> tokenList = tokens.getTokens();

    System.out.println("TokenList: -------------------------------");
    for (Token tok : tokenList) {
        System.out.println(tok.toString());
    }
    System.out.flush();

    System.out.println("Input from token list: -------------------------------");

    // Reconstruct the original text, normalizing CRLF inside whitespace and
    // comment tokens; EOF is skipped.
    for (Token tok : tokenList) {
        if (tok.getType() != SparqlParser.EOF) {
            if (tok.getType() == SparqlParser.WS || tok.getType() == SparqlParser.COMMENT) {
                System.out.print(tok.getText().replace("\r\n", "\n"));
            } else {
                System.out.print(tok.getText());
            }
        }
    }
    System.out.flush();

    SparqlParser parser = new SparqlParser(tokens);
    parser.setBuildParseTree(true);

    System.out.println("Start parsing: -------------------------------");
    System.out.flush();

    ParserRuleContext t = parser.query();

    System.out.flush();
    System.out.println("Parse tree: -------------------------------");
    System.out.println(t.toStringTree(parser));

    getTreeText(t, parser.getRuleNames());

    // Visualize the parse tree in a dialog box.
    t.inspect(parser);

    if (parser.getNumberOfSyntaxErrors() <= 0) {

        String groupFile = "/programming/proys/SWB4/swb/SWBPlatform/src/org/semanticwb/rdf/sparql/ident.stg";
        if (args.length > 1) {
            groupFile = args[1];
        }
        System.out.println("Read StringTemplate Group File: " + groupFile + "-------------------------------");

        // Loaded for the commented-out reformatting pipeline below.
        STGroup g = new STGroupFile(groupFile);
        //         IdentVisitor visitor = new IdentVisitor();
        //         visitor.setSTGroup(g);
        //         ST query = visitor.visit(t);
        //
        //         System.out.println("Emit reformatted query: -------------------------------");
        //
        //         System.out.println(query.render(lineWidth));
        //
        //         System.out.println("Emit original query: -------------------------------");
        //
        //         String q = query.render(lineWidth);
        //
        //         /* get common token stream */
        //         File tmpFile = File.createTempFile("query_", ".rq");
        //         FileOutputStream fo = new FileOutputStream(tmpFile);
        //         OutputStreamWriter ow = new OutputStreamWriter(fo, "UTF8");
        //         ow.write(q);
        //         ow.close();
        //         /* transformation pipline
        //          * step 1: Unicode pre-processing
        //          * step 2: Lexical analysis
        //          */
        //         lex = new SparqlLexer(new ANTLRFileStream(tmpFile.getCanonicalPath(), "UTF8"));
        // NOTE(review): lex has already been fully consumed above, so this
        // fresh stream yields no tokens until the commented-out lexer
        // re-creation is restored.
        tokens = new CommonTokenStream(lex);

        List<Token> formattedTokenList = tokens.getTokens();
        // fit would drive the reformatted output once the pipeline is enabled.
        Iterator<Token> fit = formattedTokenList.iterator();

        // Re-emit the original token text (same normalization as above).
        for (Token originalToken : tokenList) {
            if (originalToken.getType() != SparqlParser.EOF) {
                if (originalToken.getType() == SparqlParser.WS
                        || originalToken.getType() == SparqlParser.COMMENT) {
                    System.out.print(originalToken.getText().replace("\r\n", "\n"));
                } else {
                    System.out.print(originalToken.getText());
                }
            }
        }
        System.out.flush();

    }
    System.out.println("-------------------------------");
    System.out.println("Number of errors encountered: " + parser.getNumberOfSyntaxErrors());
}

From source file: org.tvl.goworks.editor.go.parser.CompiledModelParser.java

License: Open Source License

/**
 * Parses the Go source captured by {@code snapshot} into a
 * {@code CompiledModel}, caching the most recent outcome so that repeated
 * requests for the same snapshot do not reparse.
 *
 * <p>Parsing escalates through three parser configurations: FASTEST first;
 * if that aborts with a {@link ParseCancellationException} caused by a
 * {@link RecognitionException}, it retries with HYBRID, and then with
 * PRECISE (the final pass attaches a syntax-error listener so errors are
 * collected instead of aborting).
 *
 * @param taskManager used to obtain the pre-lexed token tagger for the snapshot
 * @param context the parse context (not read directly in this method)
 * @param snapshot the document snapshot to parse
 * @return the compiled model, or {@code null} when a {@code RecognitionException}
 *         escaped all retries
 * @throws InterruptedException if waiting for the token data is interrupted
 * @throws ExecutionException if token retrieval or parsing fails, or when the
 *         cached result for this snapshot is a previous failure
 */
protected CompiledModel parseImpl(@NonNull ParserTaskManager taskManager, ParseContext context,
        DocumentSnapshot snapshot) throws InterruptedException, ExecutionException {

    Parameters.notNull("snapshot", snapshot);

    synchronized (lock) {
        // Cache hit: same snapshot as last time -- replay the cached outcome.
        if (snapshot.equals(lastSnapshot)) {
            if (lastException != null) {
                // The previous parse of this snapshot failed; rethrow that failure.
                throw new ExecutionException("An unexpected error occurred.", lastException);
            }

            return new CompiledModel(snapshot, lastResult);
        }

        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.log(Level.FINE, "Reparsing snapshot {0}", snapshot);
        }

        try {
            // Obtain the already-lexed tokens for this snapshot and wrap them
            // as a token source for the parser.
            Future<ParserData<Tagger<TokenTag<Token>>>> futureTokensData = taskManager.getData(snapshot,
                    GoParserDataDefinitions.LEXER_TOKENS);
            Tagger<TokenTag<Token>> tagger = futureTokensData != null ? futureTokensData.get().getData() : null;
            TaggerTokenSource tokenSource = new TaggerTokenSource(tagger, snapshot);
            CommonTokenStream tokenStream = new CommonTokenStream(tokenSource);
            // First attempt uses the fastest (least forgiving) configuration.
            GoParser parser = GoParserFactory.DEFAULT.getParser(tokenStream, ParserConfiguration.FASTEST);
            try {
                SyntaxErrorListener syntaxErrorListener = new SyntaxErrorListener(snapshot);
                SourceFileContext sourceFileContext;
                try {
                    try {
                        sourceFileContext = parser.sourceFile();
                    } catch (ParseCancellationException ex) {
                        if (ex.getCause() instanceof RecognitionException) {
                            // retry with hybrid parser
                            tokenStream.reset();
                            parser = GoParserFactory.DEFAULT.getParser(tokenStream, ParserConfiguration.HYBRID);
                            sourceFileContext = parser.sourceFile();
                        } else {
                            throw ex;
                        }
                    }
                } catch (ParseCancellationException ex) {
                    if (ex.getCause() instanceof RecognitionException) {
                        // retry with precise parser and default error handler
                        tokenStream.reset();
                        parser = GoParserFactory.DEFAULT.getParser(tokenStream, ParserConfiguration.PRECISE);
                        parser.removeErrorListeners();
                        parser.addErrorListener(syntaxErrorListener);
                        sourceFileContext = parser.sourceFile();
                    } else {
                        throw ex;
                    }
                }

                FileObject fileObject = snapshot.getVersionedDocument().getFileObject();
                Token[] groupTokens = tokenStream.getTokens().toArray(new Token[0]);
                // Success: cache the result before returning it.
                lastSnapshot = snapshot;
                lastResult = new CompiledFileModel(sourceFileContext, syntaxErrorListener.getSyntaxErrors(),
                        fileObject, groupTokens);
                lastException = null;
                return new CompiledModel(snapshot, lastResult);
            } catch (RecognitionException ex) {
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.log(Level.FINE, "A recognition exception occurred while parsing.", ex);
                }

                // Cache "no result" (not an error) for this snapshot.
                lastSnapshot = snapshot;
                lastResult = null;
                lastException = null;
                return null;
            }
        } catch (InterruptedException | ExecutionException | ParseCancellationException ex) {
            // Cache the failure so the next request for this snapshot rethrows it.
            lastSnapshot = snapshot;
            lastResult = null;
            lastException = ex;
            throw new ExecutionException("An unexpected error occurred.", ex);
        }
    }
}

From source file: x10dt.ui.contentProposer.X10ContentProposer.java

License: Open Source License

/**
 * Computes content-assist proposals for the given document offset.
 *
 * <p>Scans the token stream for the default-channel token covering the
 * offset. That token (if any), the default-channel token before it, and the
 * token just past the scan position are mapped to AST nodes, which determine
 * whether to offer member candidates, names in scope, or code templates.
 */
public ICompletionProposal[] getContentProposals(IParseController controller, int offset, ITextViewer viewer) {

    ArrayList<ICompletionProposal> list = new ArrayList<ICompletionProposal>();

    CommonTokenStream tokens = ((ParseController) controller).getTokens();

    Token tokenToComplete = null;
    Token previousToken = null;
    Token nextToken = null;

    // Find the default-channel token containing the offset; remember the
    // last default-channel token seen before it. index ends one past the
    // token the loop stopped on.
    int index = 0;
    for (Token t : tokens.getTokens()) {
        index++;
        if (t.getChannel() == Token.DEFAULT_CHANNEL) {
            if (t.getStartIndex() <= offset && t.getStopIndex() + 1 >= offset) {
                tokenToComplete = t;
                break;
            }
            if (t.getStartIndex() > offset) {
                break;
            }
            previousToken = t;
        }
    }

    if (tokenToComplete == null && index < tokens.getTokens().size()) {
        // Bounds check added: previously this threw IndexOutOfBoundsException
        // when the offset lay beyond the last token (loop ran to completion
        // and index == size). NOTE(review): index is one past the token the
        // loop stopped on -- confirm that is the intended "next" token.
        nextToken = tokens.getTokens().get(index);
    }

    String prefix = tokenToComplete == null ? ""
            : computePrefixOfToken(tokenToComplete, offset, (ParseController) controller);

    PolyglotNodeLocator locator = new PolyglotNodeLocator(
            controller.getProject()/*,((ParseController) controller).getLexStream()*/);
    Node currentAst = (Node) controller.getCurrentAst();
    // Map each interesting token to the AST node at its position (if any).
    Node node = tokenToComplete != null
            ? (Node) locator.findNode(currentAst, tokenToComplete.getStartIndex(),
                    tokenToComplete.getStopIndex())
            : null;
    Node previousNode = (previousToken != null)
            ? (Node) locator.findNode(currentAst, previousToken.getStartIndex(), previousToken.getStopIndex())
            : null;
    Node nextNode = (nextToken != null)
            ? (Node) locator.findNode(currentAst, nextToken.getStartIndex(), nextToken.getStopIndex())
            : null;

    if (node != null && node instanceof Eval && tokenToComplete.getType() == X10Parser.DOT) {
        // Completing after a '.' on an expression: offer that type's members.
        Type type = ((Eval_c) node).expr().type();
        if (type != null && type.isReference()) {
            getCandidates((ObjectType) type, list, prefix, offset, true);
        }
    } else if (node != null && node instanceof Id && previousNode instanceof Field) {
        // Completing a field-access identifier: members of the target type.
        Type type = ((Field_c) previousNode).target().type();
        if (type != null && type.isReference()) {
            getCandidates((ObjectType) type, list, prefix, offset, true);
        }
    } else if (node != null && node instanceof Id && previousNode instanceof Call) {
        // Completing a method-call identifier: members of the target type.
        Type type = ((Call_c) previousNode).target().type();
        if (type != null && type.isReference()) {
            getCandidates((ObjectType) type, list, prefix, offset, true);
        }

        //The next case completes an Id with names in scope
    } else if (node != null && node instanceof Id) {
        // node is known to be an Id here, so the old (node instanceof Id)
        // ternaries always selected node/prefix; use them directly.
        addNamesInScope(currentAst, node, prefix, list, offset, !EMPTY_PREFIX_MATCHES);

    } else if (node == null && previousNode != null) { //Display templates, names in scope -- index < 0 when we are at a white space or comment
        Node location = location(previousNode, nextNode, locator, currentAst);
        if (location instanceof Block && (justAfter(X10Parser.SEMICOLON, previousToken)
                || justAfter(X10Parser.RBRACE, previousToken) || justAfter(X10Parser.LBRACE, previousToken))) { //Statement context.
            addTemplateProposals(offset, viewer, list, prefix, fTemplates);
            //addNamesInScope(currentAst, node, prefix, list, offset, EMPTY_PREFIX_MATCHES);
        } else if (justAfter(X10Parser.EQUAL, previousToken)
                && (location instanceof Assign || location instanceof LocalDecl)) {
            // Right-hand side of an assignment or declaration: expression templates.
            Template[] templates = new Template[] { fAtExpressionTemplate, fCoercionTemplate, fRegion1DTemplate,
                    fRegion2DTemplate };
            addTemplateProposals(offset, viewer, list, prefix, templates);
        } else if (location instanceof ClassBody) { //Class context
            Template[] templates = new Template[] { fVariableDeclaration, fValueDeclaration, fConstDeclaration,
                    fPropertyDeclaration, fMainMethod, fMethodTemplate, fConstructorTemplate, fClassTemplate,
                    fStructTemplate, fDependentTypeDeclaration, };
            addTemplateProposals(offset, viewer, list, prefix, templates);

        }
    }

    return list.toArray(new ICompletionProposal[list.size()]);
}