Example usage for org.antlr.v4.runtime CommonTokenStream CommonTokenStream

Introduction

On this page you can find example usage for the org.antlr.v4.runtime CommonTokenStream constructor CommonTokenStream(TokenSource).

Prototype

public CommonTokenStream(TokenSource tokenSource) 

Source Link

Document

Constructs a new CommonTokenStream using the specified token source and the default token channel (Token#DEFAULT_CHANNEL).
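
A minimal sketch of the typical pipeline: wrap the input text in a character stream, feed it to a generated lexer, buffer the tokens with CommonTokenStream, and hand the stream to the generated parser. MyGrammarLexer, MyGrammarParser, and startRule are hypothetical placeholders for the classes and entry rule generated from your own grammar; they are not taken from the examples below.

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

// MyGrammarLexer, MyGrammarParser, and startRule are hypothetical names standing in
// for the lexer, parser, and entry rule generated from a grammar named MyGrammar.
public static ParseTree parse(String input) {
    ANTLRInputStream charStream = new ANTLRInputStream(input); // wrap the raw text
    MyGrammarLexer lexer = new MyGrammarLexer(charStream);     // any TokenSource works here
    CommonTokenStream tokens = new CommonTokenStream(lexer);   // buffers tokens from Token.DEFAULT_CHANNEL
    MyGrammarParser parser = new MyGrammarParser(tokens);
    return parser.startRule();                                 // invoke the grammar's start rule
}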

Usage

From source file: info.fulloo.trygve.parser.ParseRun.java

License:Open Source License

protected void commonInit() {
    final ANTLRInputStream inputStream = new ANTLRInputStream(input_);
    final KantLexer lexer = new KantLexer(inputStream);
    final CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
    final KantParser aParser = new KantParser(commonTokenStream);

    setupParseErrorReportingFor(lexer, aParser);

    try {
        final Method startRule = parserClass_.getMethod(startRuleName);
        final ParserRuleContext tree = (ParserRuleContext) startRule.invoke(aParser, (Object[]) null);

        if (ConfigurationOptions.treewalkTraceEnabled()) {
            ParseTreeWalker.DEFAULT.walk(new DebugPassListener(), tree);
        }
        this.pass0(parsingData_, tree);
        this.pass1(parsingData_, tree);
        this.pass2(parsingData_, tree);
        this.pass3(parsingData_, tree);

        // Pass 4 mainly does template instantiations
        this.pass4(parsingData_, tree);
    } catch (final NoSuchMethodException nsme) {
        System.err.println("No method for rule " + startRuleName + " or it has arguments");
    } catch (final InvocationTargetException ite) {
        System.err.println("InvocationTargetException");
    } catch (final IllegalAccessException iae) {
        System.err.println("IllegalAccessException");
    }
}

From source file: Input.InputGenerator.java

public InputGenerator(String seed) {
    super();
    walker = new ParseTreeWalker();
    tree = new AmmitParser(new CommonTokenStream(new AmmitLexer(new ANTLRInputStream(seed)))).row();
}

From source file: Input.InputGenerator.java

public void setSeed(String seed) {
    tree = new AmmitParser(new CommonTokenStream(new AmmitLexer(new ANTLRInputStream(seed)))).row();
}

From source file: interpretelenguajes.InterpreteLenguajes.java

private void correrInterprete() throws IOException {

    //Get the text from the input UI and create the instructions file:
    strEntrada = jTextArea_Input.getText();
    crearFichero(strEntrada);

    //Interpreter processing
    proyectoLenguajesLexer lexer = new proyectoLenguajesLexer(
            new ANTLRFileStream("Instrucciones.proyectoLenguajes"));
    proyectoLenguajesParser parser = new proyectoLenguajesParser(new CommonTokenStream(lexer));
    ParseTree tree = parser.parse();
    //EvalVisitor visitor = new EvalVisitor();
    visitor.visit(tree);

    //The interpreter's results are shown here; they come from the EvalVisitor
    //class, read from the variable that stores the process output.
    jTextArea_Salida.setText(visitor.getStrProceso());
    //jTextArea_Salida.setText(strEntrada);
}

From source file: io.cloudchaser.murmur.Murmur.java

License:Open Source License

/**
 * @param args
 */
public static void main(String[] args) {
    try {
        Reader reader = new BufferedReader(new FileReader(args[0]));
        ANTLRInputStream input = new ANTLRInputStream(reader);
        MurmurLexer lexer = new MurmurLexer(input);
        TokenStream tokens = new CommonTokenStream(lexer);
        MurmurParser parser = new MurmurParser(tokens);

        // Build a symbol table for the file.
        ParseTreeVisitor visitor = new MurmurASTVisitor();
        visitor.visit(parser.compilationUnit());
    } catch (IOException ex) {
        Logger.getLogger(Murmur.class.getName()).log(Level.SEVERE, null, ex);
    } catch (MurmurError err) {
        // Output an error message.
        System.err.printf("Murmur Error: %1$s%n", err.getMessage());
        System.err.printf("(%1$s @Line %2$d)%n", args[0], err.getLineNumber());
        System.err.printf(">\t%1$s%n%n", err.getLineText());

        // Display the call stack.
        if (err.getCallStack() != null) {
            err.getCallStack().stream().forEach((object) -> {
                System.err.printf("@ Line %1$d:\t%2$s%n", object.getDeclaringLine(),
                        object.getMethodSignature());
            });
            System.err.println();
        }

        // Exit with error code.
        System.exit(1);
    }
}

From source file: io.darach.bitsyntax.BitSyntax.java

License:Open Source License

public static BitPattern compile(String packageName, String className, String source) throws IOException {
    // Convert string to stream
    final ByteArrayInputStream bais = new ByteArrayInputStream(source.getBytes());
    final ANTLRInputStream input = new ANTLRInputStream(bais);

    // Lex bit syntax expression
    final BitSyntaxLexer lexer = new BitSyntaxLexer(input);
    final CommonTokenStream tokens = new CommonTokenStream(lexer);

    // Parse the lexed tokens into an AST
    final BitSyntaxParser parser = new BitSyntaxParser(tokens);
    final BitSyntaxParser.BinaryContext binary = parser.binary();

    final BitSyntaxExpandoVisitor visitor0 = new BitSyntaxExpandoVisitor();
    final BitSyntaxSourcePrinter visitor1 = new BitSyntaxSourcePrinter();
    final BitSyntaxBytecodePrinter visitor2 = new BitSyntaxBytecodePrinter();

    // Extract Segments from abstract syntax tree

    visitor0.setSource(source);
    visitor0.setPackageName(packageName);
    visitor0.setClassName(className);
    visitor1.setSource(source);
    visitor1.setPackageName(packageName);
    visitor1.setClassName(className);
    visitor2.setSource(source);
    try {
        visit(binary, visitor0);
    } catch (Exception e) {
        e.printStackTrace();
    }
    visitor2.setExpansion(visitor0.meta());
    visitor2.setPackageName(packageName);
    visitor2.setClassName(className);
    visit(binary, visitor1);
    visit(binary, visitor2);

    try {
        return visitor2.load();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file: io.druid.data.input.influx.InfluxParser.java

License:Apache License

@Nullable
@Override
public Map<String, Object> parseToMap(String input) {
    CharStream charStream = new ANTLRInputStream(input);
    InfluxLineProtocolLexer lexer = new InfluxLineProtocolLexer(charStream);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    InfluxLineProtocolParser parser = new InfluxLineProtocolParser(tokenStream);

    List<InfluxLineProtocolParser.LineContext> lines = parser.lines().line();
    if (parser.getNumberOfSyntaxErrors() != 0) {
        throw new ParseException("Unable to parse line.");
    }
    if (lines.size() != 1) {
        throw new ParseException("Multiple lines present; unable to parse more than one per record.");
    }

    Map<String, Object> out = new LinkedHashMap<>();

    InfluxLineProtocolParser.LineContext line = lines.get(0);
    String measurement = parseIdentifier(line.identifier());

    if (!checkWhitelist(measurement)) {
        throw new ParseException("Metric not whitelisted.");
    }

    out.put(MEASUREMENT_KEY, measurement);
    if (line.tag_set() != null) {
        line.tag_set().tag_pair().forEach(t -> parseTag(t, out));
    }

    line.field_set().field_pair().forEach(t -> parseField(t, out));

    if (line.timestamp() != null) {
        String timestamp = line.timestamp().getText();
        parseTimestamp(timestamp, out);
    }
    return out;
}

From source file: io.druid.math.expr.Parser.java

License:Apache License

@VisibleForTesting
static Expr parse(String in, ExprMacroTable macroTable, boolean withFlatten) {
    ExprLexer lexer = new ExprLexer(new ANTLRInputStream(in));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ExprParser parser = new ExprParser(tokens);
    parser.setBuildParseTree(true);
    ParseTree parseTree = parser.expr();
    ParseTreeWalker walker = new ParseTreeWalker();
    ExprListenerImpl listener = new ExprListenerImpl(parseTree, macroTable);
    walker.walk(listener, parseTree);
    return withFlatten ? flatten(listener.getAST()) : listener.getAST();
}

From source file: io.druid.server.sql.SQLRunner.java

License:Apache License

public static void main(String[] args) throws Exception {

    Options options = new Options();
    options.addOption("h", "help", false, "help");
    options.addOption("v", false, "verbose");
    options.addOption("e", "host", true, "endpoint [hostname:port]");

    CommandLine cmd = new GnuParser().parse(options, args);

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("SQLRunner", options);
        System.exit(2);
    }

    String hostname = cmd.getOptionValue("e", "localhost:8080");
    String sql = cmd.getArgs().length > 0 ? cmd.getArgs()[0] : STATEMENT;

    ObjectMapper objectMapper = new DefaultObjectMapper();
    ObjectWriter jsonWriter = objectMapper.writerWithDefaultPrettyPrinter();

    CharStream stream = new ANTLRInputStream(sql);
    DruidSQLLexer lexer = new DruidSQLLexer(stream);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    DruidSQLParser parser = new DruidSQLParser(tokenStream);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();

    lexer.addErrorListener(ConsoleErrorListener.INSTANCE);
    parser.addErrorListener(ConsoleErrorListener.INSTANCE);

    try {
        DruidSQLParser.QueryContext queryContext = parser.query();
        if (parser.getNumberOfSyntaxErrors() > 0)
            throw new IllegalStateException();
        //      parser.setBuildParseTree(true);
        //      System.err.println(q.toStringTree(parser));
    } catch (Exception e) {
        String msg = e.getMessage();
        if (msg != null)
            System.err.println(e);
        System.exit(1);
    }

    final Query query;
    final TypeReference typeRef;
    boolean groupBy = false;
    if (parser.groupByDimensions.isEmpty()) {
        query = Druids.newTimeseriesQueryBuilder().dataSource(parser.getDataSource())
                .aggregators(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .postAggregators(parser.postAggregators).intervals(parser.intervals)
                .granularity(parser.granularity).filters(parser.filter).build();

        typeRef = new TypeReference<List<Result<TimeseriesResultValue>>>() {
        };
    } else {
        query = GroupByQuery.builder().setDataSource(parser.getDataSource())
                .setAggregatorSpecs(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .setPostAggregatorSpecs(parser.postAggregators).setInterval(parser.intervals)
                .setGranularity(parser.granularity).setDimFilter(parser.filter)
                .setDimensions(new ArrayList<DimensionSpec>(parser.groupByDimensions.values())).build();

        typeRef = new TypeReference<List<Row>>() {
        };
        groupBy = true;
    }

    String queryStr = jsonWriter.writeValueAsString(query);
    if (cmd.hasOption("v"))
        System.err.println(queryStr);

    URL url = new URL(String.format("http://%s/druid/v2/?pretty", hostname));
    final URLConnection urlConnection = url.openConnection();
    urlConnection.addRequestProperty("content-type", MediaType.APPLICATION_JSON);
    urlConnection.getOutputStream().write(StringUtils.toUtf8(queryStr));
    BufferedReader stdInput = new BufferedReader(
            new InputStreamReader(urlConnection.getInputStream(), Charsets.UTF_8));

    Object res = objectMapper.readValue(stdInput, typeRef);

    Joiner tabJoiner = Joiner.on("\t");

    if (groupBy) {
        List<Row> rows = (List<Row>) res;
        Iterable<String> dimensions = Iterables.transform(parser.groupByDimensions.values(),
                new Function<DimensionSpec, String>() {
                    @Override
                    public String apply(@Nullable DimensionSpec input) {
                        return input.getOutputName();
                    }
                });

        System.out.println(
                tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), dimensions, parser.fields)));
        for (final Row r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(
                    Lists.newArrayList(parser.granularity.toDateTime(r.getTimestampFromEpoch())),
                    Iterables.transform(parser.groupByDimensions.values(),
                            new Function<DimensionSpec, String>() {
                                @Override
                                public String apply(@Nullable DimensionSpec input) {
                                    return Joiner.on(",").join(r.getDimension(input.getOutputName()));
                                }
                            }),
                    Iterables.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getFloatMetric(input);
                        }
                    }))));
        }
    } else {
        List<Result<TimeseriesResultValue>> rows = (List<Result<TimeseriesResultValue>>) res;
        System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), parser.fields)));
        for (final Result<TimeseriesResultValue> r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList(r.getTimestamp()),
                    Lists.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getValue().getMetric(input);
                        }
                    }))));
        }
    }

    CloseQuietly.close(stdInput);
}

From source file: io.github.hjuergens.time.GenerateParseTreeAST.java

License:Apache License

@Test(suiteName = "date")
public static void datesFromResource() throws IOException {
    ANTLRInputStream input = new ANTLRInputStream(
            GenerateParseTreeAST.class.getResourceAsStream("/dates.expr"));
    DatesLexer lexer = new DatesLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    DatesParser parser = new DatesParser(tokens);

    {
        ParseTree treeDate = parser.date().getRuleContext(); // parse
        assertNotNull("date parse tree is null", treeDate);
        log.info(parser.date().toStringTree());
    }

    ParseTree treeDate = parser.date(); // parse
    assertNotNull("date parse tree is null", treeDate);
    log.info(parser.date().toStringTree());

    ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
    walker.walk(extractor, parser.date()); // initiate walk of tree with listener
}