Example usage for org.antlr.v4.runtime CommonTokenStream CommonTokenStream

List of usage examples for org.antlr.v4.runtime CommonTokenStream CommonTokenStream

Introduction

On this page you can find example usage for org.antlr.v4.runtime CommonTokenStream CommonTokenStream.

Prototype

public CommonTokenStream(TokenSource tokenSource) 

Source Link

Document

Constructs a new CommonTokenStream using the specified token source and the default token channel ( Token#DEFAULT_CHANNEL ).

Usage

From source file: App.java

License: Apache License

/**
 * Parses a hard-coded XPath expression and evaluates it against the
 * sample document via {@code EvalXpath}.
 *
 * @param args command line arguments (unused)
 */
public static void main(String[] args) {
    try {
        // Alternative sample input, kept for reference:
        // ANTLRInputStream input = new ANTLRInputStream("doc(doc)/PERSONAE");
        ANTLRInputStream charStream = new ANTLRInputStream("doc(doc)/PERSONAE/TITLE");

        // Lexer -> token buffer -> parser pipeline.
        xPathLexer pathLexer = new xPathLexer(charStream);
        xPathParser pathParser = new xPathParser(new CommonTokenStream(pathLexer));
        pathParser.removeErrorListeners();

        // Parse starting at rule 'ap' and evaluate the resulting tree.
        ParseTree tree = pathParser.ap();
        new EvalXpath().visit(tree);

    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Error: " + e.getMessage());
    }
}

From source file: HSMgen.java

License: Open Source License

/**
 * Entry point: parses an HSM description from the file named in args[0]
 * (or from stdin when no argument is given) and emits generated C++ on
 * stdout.
 *
 * Fix: the FileInputStream opened for a named input file was never
 * closed; it is now released in a finally block. Stdin is deliberately
 * left open.
 *
 * @param args optional: args[0] is the input file name
 * @throws Exception on I/O or parse failure
 */
public static void main(String[] args) throws Exception {
    String inputFile = null;

    // Push global environment.
    pushEnv(new SymEnv());

    if (args.length > 0)
        inputFile = args[0];
    InputStream is = (inputFile != null) ? new FileInputStream(inputFile) : System.in;
    try {
        HSMgenLexer lexer = new HSMgenLexer(new ANTLRInputStream(is));
        HSMgenParser parser = new HSMgenParser(new CommonTokenStream(lexer));
        parser.setBuildParseTree(true); // tell ANTLR to build a parse tree
        ParseTree tree = parser.init();

        // Needed for 'NULL' definition.
        System.out.println("#include <cstddef>");

        new ParseTreeWalker().walk(new HSMgen(), tree);
    } finally {
        // Only close streams we opened ourselves; never close System.in.
        if (is != System.in) {
            is.close();
        }
    }
}

From source file: ComplexityListenerAggregator.java

License: Open Source License

/**
 * Parses the VB6 source file at {@code filePath} and returns its parse
 * tree.
 *
 * Fix: the BufferedReader was never closed (resource leak); it is now
 * managed with try-with-resources. ANTLRInputStream buffers the entire
 * reader contents in its constructor, so the reader is no longer needed
 * once parsing objects are built.
 *
 * @param filePath path of the file to parse
 * @return the parse tree for the whole file (rule {@code file})
 * @throws Exception if the file cannot be read or parsed
 */
private static ParseTree GetFileParseTree(String filePath) throws Exception {
    try (BufferedReader fs = new BufferedReader(new FileReader(filePath))) {
        System.out.println("Parsing file: " + filePath);

        // create a CharStream that reads the whole file up front
        ANTLRInputStream input = new ANTLRInputStream(fs);

        // create a lexer that feeds off of input CharStream
        VisualBasic6Lexer lexer = new VisualBasic6Lexer(input);

        // create a buffer of tokens pulled from the lexer
        CommonTokenStream tokens = new CommonTokenStream(lexer);

        // parse the file
        VisualBasic6Parser parser = new VisualBasic6Parser(tokens);
        return parser.file();
    }
}

From source file: SparqlMain.java

License: Apache License

/**
 * Reformats the SPARQL query in the file named by {@code args[0]} using a
 * StringTemplate group, echoing the token stream, the reconstructed input,
 * the parse tree and the rendered result to stdout.
 *
 * NOTE(review): {@code args[1]} is read twice with different meanings —
 * first as the output line width (parsed as an int), later as the template
 * group file name. Confirm which is intended.
 *
 * @param args args[0] = input file; args[1] = line width / group file
 */
public static void main(String args[]) throws Exception {

    System.out.println("Work on file " + args[0]);

    // Optional second argument: maximum output line width (default 80).
    int lineWidth = 80;
    if (args.length >= 2) {
        lineWidth = Integer.parseInt(args[1]);
    }

    SparqlLexer lex = null;
    try {
        lex = new SparqlLexer(new ANTLRFileStream(args[0]));
    } catch (IOException ex) {
        // NOTE(review): lex stays null after this catch, so the next line
        // would fail with a NullPointerException — consider aborting here.
        Logger.getLogger(SparqlMain.class.getName()).log(Level.SEVERE, null, ex);
    }
    CommonTokenStream tokens = new CommonTokenStream(lex);

    System.out.println("Tokens: -------------------------------");

    // Force the stream to buffer every token so they can be listed below.
    tokens.fill();
    System.out.println("Number of tokens " + tokens.getTokens().size());

    List tokenList = tokens.getTokens();

    System.out.println("TokenList: -------------------------------");
    Iterator it = tokenList.iterator();
    while (it.hasNext()) {
        Token t = (Token) it.next();
        System.out.println(t.toString());
    }
    System.out.flush();

    System.out.println("Input from token list: -------------------------------");

    // Reconstruct the original input by concatenating token texts,
    // normalising line endings in whitespace/comment tokens to the
    // platform separator.
    it = tokenList.iterator();
    while (it.hasNext()) {
        Token t = (Token) it.next();
        if (t.getType() != SparqlParser.EOF) {
            if (t.getType() == SparqlParser.WS || t.getType() == SparqlParser.COMMENT) {
                String s = t.getText();
                s = s.replace("\r\n", "\n");
                if (!System.lineSeparator().equals("\n")) {
                    s = s.replace("\n", System.lineSeparator());
                }
                System.out.print(s);
            } else {
                System.out.print(t.getText());
            }
        }
    }
    System.out.flush();

    SparqlParser parser = new SparqlParser(tokens);
    parser.setBuildParseTree(true);

    System.out.println("Start parsing: -------------------------------");
    System.out.flush();

    // Parse starting at the 'query' rule.
    ParserRuleContext t = parser.query();

    System.out.flush();
    System.out.println("Parse tree: -------------------------------");
    System.out.println(t.toStringTree(parser));

    // visualize parse tree in dialog box
    t.inspect(parser);

    // Only attempt reformatting when the parse was clean.
    if (parser.getNumberOfSyntaxErrors() <= 0) {

        //ParseTreeWalker walker = new ParseTreeWalker();

        String groupFile = "ident.stg";
        if (args.length > 1) {
            groupFile = args[1];
        }
        System.out.println("Read StringTemplate Group File: " + groupFile + "-------------------------------");

        // Render the parse tree through the StringTemplate group.
        STGroup g = new STGroupFile(groupFile);
        IdentVisitor visitor = new IdentVisitor();
        visitor.setSTGroup(g);
        ST query = visitor.visit(t);

        System.out.println("Emit reformatted query: -------------------------------");

        System.out.println(query.render(lineWidth));

        System.out.println("Emit original query: -------------------------------");

        String q = query.render(lineWidth);

        /* get common token stream */
        // Round-trip the rendered query through a temp file so the lexer
        // can re-tokenise it.
        File tmpFile = File.createTempFile("query_", ".rq");
        FileOutputStream fo = new FileOutputStream(tmpFile);
        OutputStreamWriter ow = new OutputStreamWriter(fo, "UTF8");
        ow.write(q);
        ow.close();
        /* transformation pipline
         * step 1: Unicode pre-processing
         * step 2: Lexical analysis
         */
        lex = new SparqlLexer(new ANTLRFileStream(tmpFile.getCanonicalPath(), "UTF8"));
        tokens = new CommonTokenStream(lex);

        // NOTE(review): tokens.fill() is not called here, so this list is
        // likely still empty, and the iterator 'fit' below is created but
        // never advanced — the loop re-prints the ORIGINAL token list, not
        // the reformatted one. Confirm intent.
        List formattedTokenList = tokens.getTokens();

        it = tokenList.iterator();
        Iterator fit = formattedTokenList.iterator();

        boolean lineSeparatorHasToBeModified = !System.lineSeparator().equals("\n");

        while (it.hasNext()) {
            Token originalToken = (Token) it.next();
            if (originalToken.getType() != SparqlParser.EOF) {
                if (originalToken.getType() == SparqlParser.WS
                        || originalToken.getType() == SparqlParser.COMMENT) {
                    String s = originalToken.getText();
                    s = s.replace("\r\n", "\n");
                    if (lineSeparatorHasToBeModified) {
                        s = s.replace("\n", System.lineSeparator());
                    }
                    System.out.print(s);
                } else {
                    System.out.print(originalToken.getText());
                }
            }
        }
        System.out.flush();

    }
    System.out.println("-------------------------------");
    System.out.println("Number of errors encountered: " + parser.getNumberOfSyntaxErrors());
}

From source file: Extended$parser_class.java

License: BSD License

/**
 * Reads source text from stdin, invokes the parser rule named by
 * {@code args[0]} via reflection, and writes the resulting parse as a
 * JSON object to stdout. Exits with status 1 on any failure.
 */
public static void main(String[] args) {
    try {
        ANTLRInputStream source = new ANTLRInputStream(new DataInputStream(System.in));
        ExtendedTargetLexer extLexer = new ExtendedTargetLexer(source);
        extLexer.addErrorListener(new ExtendedErrorListener());

        Extended$parser_class extParser =
                new Extended$parser_class(new CommonTokenStream(extLexer));
        ExtendedTargetListener treeListener = new ExtendedTargetListener(extParser);
        extParser.addParseListener(treeListener);

        // The start rule is chosen at runtime by name.
        Extended$parser_class.class.getMethod(args[0]).invoke(extParser);
        extParser.syntaxErrorWarning();

        try (JsonWriter w = Json.createWriter(System.out)) {
            w.write(treeListener.root.createJsonObjectBuilder().build());
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

From source file: tns2toad.java

License: Open Source License

/**
 * Converts a tnsnames.ora file (named by the last command line argument)
 * into Toad import format on stdout.
 *
 * Options (must precede the positional file name): --oracle_home <path>
 * and --user <name>.
 *
 * NOTE(review): the FileInputStream opened for the input file is never
 * explicitly closed; it is held until process exit. Also, 'thisVersion'
 * is declared but unused in this method.
 */
public static void main(String[] args) throws Exception {

    // A bit of "sign on and blow my own trumpet stuff" ;-)
    String thisVersion = "0.1"; // Version of this utility.

    // Assume tnsnames.ora will be piped via stdin, unless we get a parameter passed.
    String tnsnamesFilename = null;
    InputStream iStream = System.in;

    // How many positional options are we expecting?
    int expectedPositionalArgs = 1;

    // These are collected from the command line.
    File inputFile = null;
    String oracleHome = "";
    String userName = "";

    // These are used to process the command line.
    int i = 0;
    String thisArg;

    //---------------------------------------------------------
    // Let's scan the command line and see what needs doing ...
    //---------------------------------------------------------

    // Scan along the args array, looking at all the options. These
    // are all  prefixed by "--" and must all be before any of the
    // positional arguments.
    // When we find one, we zap it!
    // Each option takes a parameter - they get zapped also.
    // ThisArg holds the argument, i points at the parameter for it.
    while (i < args.length && args[i].startsWith("--")) {
        thisArg = args[i].toLowerCase();
        args[i++] = "";

        // Oracle Home...
        if (thisArg.equals("--oracle_home")) {
            if (i < args.length) {
                oracleHome = args[i];
                args[i++] = "";
            } else {
                // usage() presumably reports the error and exits; execution
                // does not continue past it — TODO confirm.
                usage("ERROR: --oracle_home requires a path name");
            }
        }
        // User name...
        else if (thisArg.equals("--user")) {
            if (i < args.length) {
                userName = args[i];
                args[i++] = "";
            } else {
                usage("ERROR: --user requires a username");
            }
        }
        // Something else? Not permitted.
        else {
            usage("Invalid option '" + thisArg + "'");
        }

    }

    // At this point we should be sitting with i pointing at the first
    // positional argument. Scan those next. All the options have been
    // extracted now, and zapped.

    // However, just exactly how many positional args do we want? This will
    // also catch any --options mingling within the positional args.
    if (i != args.length - expectedPositionalArgs) {
        usage("Unexpected or insufficient positional parameter(s) supplied.");
    }

    // We should only have a single parameter here, the tnsnames.ora file.
    tnsnamesFilename = args[i];

    //---------------------------------------------------------
    // Well, we got here, args on the command line must be ok
    // Check if we can open and/or read the tnsnames.ora file.
    //---------------------------------------------------------
    inputFile = new File(tnsnamesFilename);
    if (inputFile.isFile()) {
        // NOTE(review): this stream replaces the System.in default and is
        // never closed.
        iStream = new FileInputStream(tnsnamesFilename);
        //tnsnamesFilename = inputFile.getCanonicalPath();
    } else {
        System.out.println("\nERROR 1: '" + tnsnamesFilename + "' is not a valid filename.\n");
        System.exit(1); // Error exit.
    }

    //---------------------------------------------------------
    // Everything is fine, let's JFDI! :-)
    //---------------------------------------------------------

    // Feed the tnsnames.ora file into the lexer and get a
    // token stream from the lexer...
    ANTLRInputStream input = new ANTLRInputStream(iStream);
    tnsnamesLexer lexer = new tnsnamesLexer(input);

    // Feed the lexer's token stream to the parser and get
    // a parse tree out in return...
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    tnsnamesParser parser = new tnsnamesParser(tokens);
    ParseTree tree = parser.tnsnames();

    // Feed the parse tree to the tree walker & the listener
    // and get a load of text on stdout as a final result.
    // That is your import file, redirect it to a file and
    // let Toad import it for you.
    ParseTreeWalker tnsWalker = new ParseTreeWalker();
    tns2toadListener tnsListener = new tns2toadListener(parser, userName, oracleHome);
    tnsWalker.walk(tnsListener, tree);
}

From source file: SExpr.java

License: Open Source License

/**
 * Builds a parser for the given query string.
 *
 * @param query the raw query text to tokenise
 * @return a MusicboxParser fed from a token buffer over the query
 */
public MusicboxParser parse(String query) {
    // Lexer over the raw characters, token buffer over the lexer,
    // parser over the token buffer.
    MusicboxLexer queryLexer = new MusicboxLexer(new ANTLRInputStream(query));
    CommonTokenStream tokenBuffer = new CommonTokenStream(queryLexer);
    return new MusicboxParser(tokenBuffer);
}

From source file: Cymbol.java

/**
 * Parses a Cymbol source file, prints its parse tree, then builds and
 * prints a function call graph (plain and DOT form).
 *
 * Fix: the FileInputStream was never closed; it is now managed with
 * try-with-resources.
 *
 * @param args the command line arguments; args[0] is the file to parse
 */
public static void main(String[] args) {

    if (args.length == 1) {
        // parsing
        // create file input stream (auto-closed by try-with-resources)
        try (FileInputStream source = new FileInputStream(args[0])) {
            // create a CharStream that reads from the file
            ANTLRInputStream input = new ANTLRInputStream(source);
            // create a lexer that feeds off of input CharStream
            CymbolLexer lexer = new CymbolLexer(input);
            // create a buffer of tokens pulled from the lexer
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            // create a parser that feeds off the tokens buffer
            CymbolParser parser = new CymbolParser(tokens);
            ParseTree tree = parser.file(); // begin parsing at init rule

            // dump ast
            System.out.println(tree.toStringTree(parser)); // print LISP-style tree

            // build call graph
            ParseTreeWalker walker = new ParseTreeWalker();
            FunctionListener collector = new FunctionListener();
            walker.walk(collector, tree);
            System.out.println(collector.graph.toString());
            System.out.println(collector.graph.toDOT());

        } catch (IOException e) {
            System.out.print("error: " + e.getMessage());
        }
    } else {
        System.out.print("error: syntax is Cymbol <file path> !");
    }
}

From source file: Utah01.java

/**
 * Parses the file named by args[0] and walks the resulting tree with the
 * JSON-emitting listener; any failure is reported on stdout.
 *
 * @param args the command line arguments; args[0] is the input file
 */
public static void main(String[] args) {
    // TODO code application logic here
    try {
        // Lexer over the input file, parser over the token buffer.
        utah01Lexer fileLexer = new utah01Lexer(new ANTLRFileStream(args[0]));
        utah01Parser fileParser = new utah01Parser(new CommonTokenStream(fileLexer));

        // Parse from the 'start' rule and walk the tree with the
        // JSON listener.
        ParserRuleContext startContext = fileParser.start();
        new ParseTreeWalker().walk(new utah01ListenerForJSON(), startContext);
    } catch (Exception e) {
        System.out.println(e);
    }
    //System.out.print("number of elements = ");
    //System.out.println(SymbolTable.count);
}

From source file: Ecmascript.java

/**
 * Parses an ECMAScript source file, prints its AST, then builds and
 * prints a function call graph (plain and DOT form).
 *
 * Fixes: the usage message wrongly said "Cymbol" (copy-paste from the
 * Cymbol example); the FileInputStream was never closed and is now
 * managed with try-with-resources.
 *
 * @param args the command line arguments; args[0] is the file to parse
 */
public static void main(String[] args) {
    if (args.length == 1) {
        // parsing
        // create file input stream (auto-closed by try-with-resources)
        try (FileInputStream source = new FileInputStream(args[0])) {
            // create a CharStream that reads from the file
            ANTLRInputStream input = new ANTLRInputStream(source);
            // create a lexer that feeds off of input CharStream
            ECMAScriptLexer lexer = new ECMAScriptLexer(input);
            // create a buffer of tokens pulled from the lexer
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            // create a parser that feeds off the tokens buffer
            ECMAScriptParser parser = new ECMAScriptParser(tokens);
            ParseTree tree = parser.program(); // begin parsing at init rule

            // dump ast
            System.out.println("AST is : " + tree.toStringTree(parser)); // print LISP-style tree

            // build call graph
            ParseTreeWalker walker = new ParseTreeWalker();
            FunctionListener collector = new FunctionListener();
            walker.walk(collector, tree);
            System.out.println(collector.graph.toString());
            System.out.println(collector.graph.toDOT());

        } catch (IOException e) {
            System.out.print("error: " + e.getMessage());
        }
    } else {
        System.out.print("error: syntax is Ecmascript <file path> !");
    }
}