Example usage for org.antlr.v4.runtime ConsoleErrorListener INSTANCE

List of usage examples for org.antlr.v4.runtime ConsoleErrorListener INSTANCE

Introduction

On this page you can find example usage for org.antlr.v4.runtime ConsoleErrorListener INSTANCE.

Prototype

ConsoleErrorListener INSTANCE

To view the source code for org.antlr.v4.runtime ConsoleErrorListener INSTANCE, click the Source Link.

Document

Provides a default instance of ConsoleErrorListener.
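
Every ANTLR-generated lexer and parser has ConsoleErrorListener.INSTANCE attached by default, so syntax errors are printed to System.err unless it is removed. The following is a minimal sketch of that pattern, not taken from any example on this page: MyLexer, MyParser, and the start rule compilationUnit are hypothetical generated names.

private void parseQuietly(String text) {
    // Hypothetical generated classes; drop the default console listener so
    // syntax errors are no longer printed to System.err.
    MyLexer lexer = new MyLexer(CharStreams.fromString(text));
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE);

    MyParser parser = new MyParser(new CommonTokenStream(lexer));
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE);

    // Errors can still be detected without any console output.
    parser.compilationUnit(); // hypothetical start rule
    if (parser.getNumberOfSyntaxErrors() > 0) {
        throw new IllegalStateException("input did not parse");
    }
}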

Usage

From source file: groovy.ui.text.SmartDocumentFilter.java

License: Apache License

private GroovyLangLexer createLexer(String text) throws IOException {
    CharStream charStream = CharStreams.fromReader(new StringReader(text));
    GroovyLangLexer lexer = new GroovyLangLexer(charStream);

    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE);

    return lexer;
}

From source file: io.druid.server.sql.SQLRunner.java

License: Apache License

public static void main(String[] args) throws Exception {

    Options options = new Options();
    options.addOption("h", "help", false, "help");
    options.addOption("v", false, "verbose");
    options.addOption("e", "host", true, "endpoint [hostname:port]");

    CommandLine cmd = new GnuParser().parse(options, args);

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("SQLRunner", options);
        System.exit(2);
    }

    String hostname = cmd.getOptionValue("e", "localhost:8080");
    String sql = cmd.getArgs().length > 0 ? cmd.getArgs()[0] : STATEMENT;

    ObjectMapper objectMapper = new DefaultObjectMapper();
    ObjectWriter jsonWriter = objectMapper.writerWithDefaultPrettyPrinter();

    CharStream stream = new ANTLRInputStream(sql);
    DruidSQLLexer lexer = new DruidSQLLexer(stream);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    DruidSQLParser parser = new DruidSQLParser(tokenStream);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();

    lexer.addErrorListener(ConsoleErrorListener.INSTANCE);
    parser.addErrorListener(ConsoleErrorListener.INSTANCE);

    try {
        DruidSQLParser.QueryContext queryContext = parser.query();
        if (parser.getNumberOfSyntaxErrors() > 0)
            throw new IllegalStateException();
        //      parser.setBuildParseTree(true);
        //      System.err.println(q.toStringTree(parser));
    } catch (Exception e) {
        String msg = e.getMessage();
        if (msg != null)
            System.err.println(e);
        System.exit(1);
    }

    final Query query;
    final TypeReference typeRef;
    boolean groupBy = false;
    if (parser.groupByDimensions.isEmpty()) {
        query = Druids.newTimeseriesQueryBuilder().dataSource(parser.getDataSource())
                .aggregators(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .postAggregators(parser.postAggregators).intervals(parser.intervals)
                .granularity(parser.granularity).filters(parser.filter).build();

        typeRef = new TypeReference<List<Result<TimeseriesResultValue>>>() {
        };
    } else {
        query = GroupByQuery.builder().setDataSource(parser.getDataSource())
                .setAggregatorSpecs(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
                .setPostAggregatorSpecs(parser.postAggregators).setInterval(parser.intervals)
                .setGranularity(parser.granularity).setDimFilter(parser.filter)
                .setDimensions(new ArrayList<DimensionSpec>(parser.groupByDimensions.values())).build();

        typeRef = new TypeReference<List<Row>>() {
        };
        groupBy = true;
    }

    String queryStr = jsonWriter.writeValueAsString(query);
    if (cmd.hasOption("v"))
        System.err.println(queryStr);

    URL url = new URL(String.format("http://%s/druid/v2/?pretty", hostname));
    final URLConnection urlConnection = url.openConnection();
    urlConnection.addRequestProperty("content-type", MediaType.APPLICATION_JSON);
    urlConnection.getOutputStream().write(StringUtils.toUtf8(queryStr));
    BufferedReader stdInput = new BufferedReader(
            new InputStreamReader(urlConnection.getInputStream(), Charsets.UTF_8));

    Object res = objectMapper.readValue(stdInput, typeRef);

    Joiner tabJoiner = Joiner.on("\t");

    if (groupBy) {
        List<Row> rows = (List<Row>) res;
        Iterable<String> dimensions = Iterables.transform(parser.groupByDimensions.values(),
                new Function<DimensionSpec, String>() {
                    @Override
                    public String apply(@Nullable DimensionSpec input) {
                        return input.getOutputName();
                    }
                });

        System.out.println(
                tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), dimensions, parser.fields)));
        for (final Row r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(
                    Lists.newArrayList(parser.granularity.toDateTime(r.getTimestampFromEpoch())),
                    Iterables.transform(parser.groupByDimensions.values(),
                            new Function<DimensionSpec, String>() {
                                @Override
                                public String apply(@Nullable DimensionSpec input) {
                                    return Joiner.on(",").join(r.getDimension(input.getOutputName()));
                                }
                            }),
                    Iterables.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getFloatMetric(input);
                        }
                    }))));
        }
    } else {
        List<Result<TimeseriesResultValue>> rows = (List<Result<TimeseriesResultValue>>) res;
        System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList("timestamp"), parser.fields)));
        for (final Result<TimeseriesResultValue> r : rows) {
            System.out.println(tabJoiner.join(Iterables.concat(Lists.newArrayList(r.getTimestamp()),
                    Lists.transform(parser.fields, new Function<String, Object>() {
                        @Override
                        public Object apply(@Nullable String input) {
                            return r.getValue().getMetric(input);
                        }
                    }))));
        }
    }

    CloseQuietly.close(stdInput);
}

From source file: org.teavm.flavour.expr.Parser.java

License: Apache License

public ObjectExpr parseObject(String text) {
    diagnostics.clear();
    try (Reader reader = new StringReader(text)) {
        ExprLexer lexer = new ExprLexer(new ANTLRInputStream(reader));
        lexer.removeErrorListener(ConsoleErrorListener.INSTANCE);
        lexer.addErrorListener(errorListener);
        ExprParser exprParser = new ExprParser(new CommonTokenStream(lexer));
        exprParser.removeErrorListener(ConsoleErrorListener.INSTANCE);
        exprParser.addErrorListener(errorListener);
        return exprParser.object().accept(objectVisitor);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file: org.teavm.flavour.expr.Parser.java

License: Apache License

public Expr parse(String text) {
    diagnostics.clear();
    try (Reader reader = new StringReader(text)) {
        ExprLexer lexer = new ExprLexer(new ANTLRInputStream(reader));
        lexer.removeErrorListener(ConsoleErrorListener.INSTANCE);
        lexer.addErrorListener(errorListener);
        ExprParser exprParser = new ExprParser(new CommonTokenStream(lexer));
        exprParser.removeErrorListener(ConsoleErrorListener.INSTANCE);
        exprParser.addErrorListener(errorListener);
        return exprParser.lambda().accept(exprVisitor);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
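
In both TeaVM methods the custom errorListener field is defined elsewhere in Parser.java and is not shown here. As a rough, assumed sketch (not the actual TeaVM implementation), such a replacement listener typically extends BaseErrorListener and collects diagnostics instead of printing them:

// Assumed sketch of a collecting listener; the real TeaVM errorListener
// implementation is not shown in the snippets above.
class CollectingErrorListener extends BaseErrorListener {
    private final List<String> diagnostics = new ArrayList<>();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
            int charPositionInLine, String msg, RecognitionException e) {
        diagnostics.add("line " + line + ":" + charPositionInLine + " " + msg);
    }

    public List<String> getDiagnostics() {
        return diagnostics;
    }
}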