Java 类org.antlr.v4.runtime.CommonTokenFactory 实例源码

项目:pinot    文件:Pql2Compiler.java   
@Override
public TransformExpressionTree compileToExpressionTree(String expression) {
  // Copy token text into each token: the unbuffered stream below discards its
  // backing characters, so tokens must carry their own text.
  PQL2Lexer lexer = new PQL2Lexer(new ANTLRInputStream(expression));
  lexer.setTokenFactory(new CommonTokenFactory(true));

  // Unbuffered token stream keeps memory bounded; bail on the first syntax
  // error instead of attempting recovery.
  PQL2Parser parser = new PQL2Parser(new UnbufferedTokenStream<CommonToken>(lexer));
  parser.setErrorHandler(new BailErrorStrategy());

  // Parse the expression, then build the AST by walking the parse tree.
  ParseTree parseTree = parser.expression();
  Pql2AstListener listener = new Pql2AstListener(expression, _splitInClause);
  new ParseTreeWalker().walk(listener, parseTree);

  return TransformExpressionTree.buildTree(listener.getRootNode());
}
项目:M2Doc    文件:M2DocParser.java   
/**
 * Parses while matching an AQL expression.
 * 
 * @param expression
 *            the expression to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlExpression(String expression) {
    final IQueryBuilderEngine.AstResult result;

    if (expression == null || expression.length() == 0) {
        // No parsable input: synthesize an error expression so callers always
        // receive a non-null result carrying a diagnostic.
        final ErrorExpression errorExpression = (ErrorExpression) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorExpression());
        final List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
        errors.add(errorExpression);
        final Map<Object, Integer> positions = new HashMap<Object, Integer>();
        // Only an empty (non-null) expression gets a position entry.
        if (expression != null) {
            positions.put(errorExpression, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
                "null or empty string.", new Object[] {errorExpression }));
        result = new AstResult(errorExpression, positions, positions, errors, diagnostic);
    } else {
        // Route all lexer and parser errors through the AST builder's listener.
        final AstBuilderListener astBuilder = AQL56Compatibility.createAstBuilderListener(queryEnvironment);
        final CharStream input = new UnbufferedCharStream(new StringReader(expression), expression.length());
        final QueryLexer lexer = new QueryLexer(input);
        // Tokens must copy their text: the unbuffered streams discard characters.
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        final QueryParser parser = new QueryParser(new UnbufferedTokenStream<CommonToken>(lexer));
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.expression();
        result = astBuilder.getAstResult();
    }

    return result;
}
项目:M2Doc    文件:TemplateCustomProperties.java   
/**
 * Parses while matching an AQL expression.
 * 
 * @param queryEnvironment
 *            the {@link IReadOnlyQueryEnvironment}
 * @param type
 *            the type to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlTypeLiteral(IReadOnlyQueryEnvironment queryEnvironment, String type) {
    final IQueryBuilderEngine.AstResult result;

    if (type == null || type.length() == 0) {
        // No parsable input: synthesize an error type literal so callers always
        // receive a non-null result carrying a diagnostic.
        final ErrorTypeLiteral errorTypeLiteral = (ErrorTypeLiteral) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorTypeLiteral());
        final List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
        errors.add(errorTypeLiteral);
        final Map<Object, Integer> positions = new HashMap<Object, Integer>();
        // Only an empty (non-null) type string gets a position entry.
        if (type != null) {
            positions.put(errorTypeLiteral, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0, "null or empty type.",
                new Object[] {errorTypeLiteral }));
        result = new AstResult(errorTypeLiteral, positions, positions, errors, diagnostic);
    } else {
        // Route all lexer and parser errors through the AST builder's listener.
        final AstBuilderListener astBuilder = AQL56Compatibility
                .createAstBuilderListener((IQueryEnvironment) queryEnvironment);
        final CharStream input = new UnbufferedCharStream(new StringReader(type), type.length());
        final QueryLexer lexer = new QueryLexer(input);
        // Tokens must copy their text: the unbuffered streams discard characters.
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        final QueryParser parser = new QueryParser(new UnbufferedTokenStream<CommonToken>(lexer));
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.typeLiteral();
        result = astBuilder.getAstResult();
    }

    return result;
}
项目:antsdb    文件:Select_or_valuesGenerator.java   
private static RuleContext createColumnName_(Expr_functionContext rule, OutputField field) {
    // Synthesize a back-tick quoted identifier token carrying the field name.
    CommonToken token = CommonTokenFactory.DEFAULT.create(
            MysqlParser.BACKTICK_QUOTED_IDENTIFIER, 
            '`' + field.name + '`' );
    TerminalNode leaf = new TerminalNodeImpl(token);

    // Build the rule-context chain column_name_ -> column_name -> identifier,
    // reusing the invoking state of the function expression being replaced.
    Column_name_Context columnNameUnderscore = new Column_name_Context(rule.getParent(), rule.invokingState);
    Column_nameContext columnName = new Column_nameContext(columnNameUnderscore.getParent(), rule.invokingState);
    IdentifierContext identifier = new IdentifierContext(columnName, rule.invokingState);

    // Attach the leaf and link each context under its parent.
    identifier.addChild(leaf);
    columnName.addChild(identifier);
    columnNameUnderscore.addChild(columnName);
    return columnNameUnderscore;
}
项目:indexr    文件:IndexRQL.java   
private static ParseTree parseSQL(String sql) {
    // Token-text copying is required because the unbuffered token stream used
    // below does not retain the underlying characters.
    RQLLexer lexer = new RQLLexer(new ANTLRInputStream(sql));
    lexer.setTokenFactory(new CommonTokenFactory(true));

    // Fail fast on the first syntax error rather than attempting recovery.
    RQLParser parser = new RQLParser(new UnbufferedTokenStream<CommonToken>(lexer));
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.root();
}
项目:grakn    文件:QueryParserImpl.java   
/**
 * Lazily parses a stream of queries: each query is consumed from the reader
 * only as the returned {@link Stream} advances, so arbitrarily long inputs are
 * handled in constant memory.
 *
 * @param reader a reader representing several queries
 * @return a list of queries
 */
@Override
public <T extends Query<?>> Stream<T> parseList(Reader reader) {
    UnbufferedCharStream charStream = new UnbufferedCharStream(reader);
    GraqlErrorListener errorListener = GraqlErrorListener.withoutQueryString();
    GraqlLexer lexer = createLexer(charStream, errorListener);

    /*
        We tell the lexer to copy the text into each generated token.
        Normally when calling `Token#getText`, it will look into the underlying `TokenStream` and call
        `TokenStream#size` to check it is in-bounds. However, `UnbufferedTokenStream#size` is not supported
        (because then it would have to read the entire input). To avoid this issue, we set this flag which will
        copy over the text into each `Token`, s.t. that `Token#getText` will just look up the copied text field.
    */
    lexer.setTokenFactory(new CommonTokenFactory(true));

    // Use an unbuffered token stream so we can handle extremely large input strings.
    // Parameterized (not raw) to match the other usages of this class and avoid unchecked warnings.
    UnbufferedTokenStream<Token> tokenStream = new UnbufferedTokenStream<>(ChannelTokenSource.of(lexer));

    GraqlParser parser = createParser(tokenStream, errorListener);

    /*
        The "bail" error strategy prevents us reading all the way to the end of the input, e.g.

        match $x isa person; insert $x has name "Bob"; match $x isa movie; get;
                                                       ^

        In this example, when ANTLR reaches the indicated `match`, it considers two possibilities:

        1. this is the end of the query
        2. the user has made a mistake. Maybe they accidentally pasted the `match` here.

        Because of case 2, ANTLR will parse beyond the `match` in order to produce a more helpful error message.
        This causes memory issues for very large queries, so we use the simpler "bail" strategy that will
        immediately stop when it hits `match`.
    */
    parser.setErrorHandler(new BailErrorStrategy());

    // This is a lazy iterator that will only consume a single query at a time, without parsing any further.
    // This means it can pass arbitrarily long streams of queries in constant memory!
    Iterable<T> queryIterator = () -> new AbstractIterator<T>() {
        @Nullable
        @Override
        @SuppressWarnings("unchecked") // QUERY.parse returns the raw query type; T is the caller's view of it
        protected T computeNext() {
            int latestToken = tokenStream.LA(1);
            if (latestToken == Token.EOF) {
                endOfData();
                return null;
            } else {
                // This will parse and consume a single query, even if it doesn't reach an EOF
                // When we next run it, it will start where it left off in the stream
                return (T) QUERY.parse(parser, errorListener);
            }
        }
    };

    return StreamSupport.stream(queryIterator.spliterator(), false);
}


项目:mini-markdown    文件:CharsAsTokens.java   
/**
 * {@inheritDoc}
 *
 * <p>Always returns the shared {@link CommonTokenFactory#DEFAULT} factory.
 */
@Override
public TokenFactory<?> getTokenFactory() {
    return CommonTokenFactory.DEFAULT;
}
项目:goworks    文件:CodeCompletionTokenSource.java   
@Override
public void setTokenFactory(TokenFactory tokenFactory) {
    // Propagate the new factory to the wrapped token source.
    source.setTokenFactory(tokenFactory);
    // Locally, fall back to the shared default when given null.
    if (tokenFactory == null) {
        this.tokenFactory = CommonTokenFactory.DEFAULT;
    } else {
        this.tokenFactory = tokenFactory;
    }
}
项目:antlrworks2    文件:CodeCompletionTokenSource.java   
@Override
public void setTokenFactory(TokenFactory tokenFactory) {
    // Forward to the underlying source, then record a non-null factory locally,
    // substituting the shared default for null.
    source.setTokenFactory(tokenFactory);
    this.tokenFactory = (tokenFactory == null) ? CommonTokenFactory.DEFAULT : tokenFactory;
}