Usage examples for `org.antlr.v4.runtime.UnbufferedTokenStream#LA`
@Override
public int LA(int i)
From source file: ai.grakn.graql.internal.parser.QueryParser.java
License: Open Source License
/**
 * Parses a string containing several Graql queries into a lazy stream of {@link Query} objects.
 *
 * <p>Adjacent {@code match} and {@code insert} queries are merged into a single
 * match-insert query (see the TODO below).
 *
 * @param queryString a string representing several queries
 * @return a lazy stream of the parsed queries
 */
public <T extends Query<?>> Stream<T> parseList(String queryString) {
    GraqlLexer lexer = getLexer(queryString);

    // Replace the default error listeners so parse errors are reported
    // against the original query string.
    GraqlErrorListener errorListener = new GraqlErrorListener(queryString);
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);

    // Unbuffered stream: avoids holding the whole token sequence in memory
    // for very large inputs.
    UnbufferedTokenStream tokenStream = new UnbufferedTokenStream(lexer);

    // Merge any match...insert queries together.
    // The iterator holds one query of look-behind ("previous") so it can
    // decide, on seeing the next query, whether the pair should be joined.
    // TODO: Find a way to NOT do this horrid thing
    AbstractIterator<T> iterator = new AbstractIterator<T>() {
        // The last parsed query not yet emitted; null when nothing is pending.
        @Nullable
        T previous = null;

        @Override
        protected T computeNext() {
            if (tokenStream.LA(1) == GraqlLexer.EOF) {
                // End of input: flush the pending query if there is one,
                // otherwise signal exhaustion to AbstractIterator.
                if (previous != null) {
                    return swapPrevious(null);
                } else {
                    endOfData();
                    return null;
                }
            }

            // Consume exactly one query's worth of tokens and parse it.
            TokenSource oneQuery = consumeOneQuery(tokenStream);

            T current = parseQueryFragment(GraqlParser::query, (q, t) -> (T) q.visitQuery(t), oneQuery,
                    errorListener);

            if (previous == null) {
                // First query seen: buffer it and recurse once to look at the
                // next query (or EOF). Recursion depth is bounded because the
                // recursive call always finds previous != null.
                previous = current;
                return computeNext();
            } else if (previous instanceof MatchQuery && current instanceof InsertQuery) {
                // match followed by insert: emit them merged as one query.
                return (T) joinMatchInsert((MatchQuery) swapPrevious(null), (InsertQuery) current);
            } else {
                // Emit the buffered query and buffer the current one.
                return swapPrevious(current);
            }
        }

        // Returns the buffered query and replaces it with newPrevious.
        private T swapPrevious(T newPrevious) {
            T oldPrevious = previous;
            previous = newPrevious;
            return oldPrevious;
        }

        // Combines a match and an insert into a single match-insert query.
        private InsertQuery joinMatchInsert(MatchQuery match, InsertQuery insert) {
            return match.insert(insert.admin().getVars());
        }
    };

    // Expose the one-shot iterator as a (single-use) stream.
    Iterable<T> iterable = () -> iterator;
    return StreamSupport.stream(iterable.spliterator(), false);
}
From source file: ai.grakn.graql.internal.parser.QueryParserImpl.java
License: Open Source License
/** * @param reader a reader representing several queries * @return a list of queries/*from w ww . j a v a2 s . co m*/ */ @Override public <T extends Query<?>> Stream<T> parseList(Reader reader) { UnbufferedCharStream charStream = new UnbufferedCharStream(reader); GraqlErrorListener errorListener = GraqlErrorListener.withoutQueryString(); GraqlLexer lexer = createLexer(charStream, errorListener); /* We tell the lexer to copy the text into each generated token. Normally when calling `Token#getText`, it will look into the underlying `TokenStream` and call `TokenStream#size` to check it is in-bounds. However, `UnbufferedTokenStream#size` is not supported (because then it would have to read the entire input). To avoid this issue, we set this flag which will copy over the text into each `Token`, s.t. that `Token#getText` will just look up the copied text field. */ lexer.setTokenFactory(new CommonTokenFactory(true)); // Use an unbuffered token stream so we can handle extremely large input strings UnbufferedTokenStream tokenStream = new UnbufferedTokenStream(ChannelTokenSource.of(lexer)); GraqlParser parser = createParser(tokenStream, errorListener); /* The "bail" error strategy prevents us reading all the way to the end of the input, e.g. ``` match $x isa person; insert $x has name "Bob"; match $x isa movie; get; ^ ``` In this example, when ANTLR reaches the indicated `match`, it considers two possibilities: 1. this is the end of the query 2. the user has made a mistake. Maybe they accidentally pasted the `match` here. Because of case 2, ANTLR will parse beyond the `match` in order to produce a more helpful error message. This causes memory issues for very large queries, so we use the simpler "bail" strategy that will immediately stop when it hits `match`. */ parser.setErrorHandler(new BailErrorStrategy()); // This is a lazy iterator that will only consume a single query at a time, without parsing any further. 
// This means it can pass arbitrarily long streams of queries in constant memory! Iterable<T> queryIterator = () -> new AbstractIterator<T>() { @Nullable @Override protected T computeNext() { int latestToken = tokenStream.LA(1); if (latestToken == Token.EOF) { endOfData(); return null; } else { // This will parse and consume a single query, even if it doesn't reach an EOF // When we next run it, it will start where it left off in the stream return (T) QUERY.parse(parser, errorListener); } } }; return StreamSupport.stream(queryIterator.spliterator(), false); }