Example source code for the Java class org.antlr.v4.runtime.TokenStream
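
All of the snippets below follow the same basic ANTLR 4 pipeline: a CharStream feeds a generated Lexer, the Lexer is wrapped in a CommonTokenStream (the standard concrete TokenStream implementation), and that TokenStream drives a generated Parser. As orientation, here is a minimal sketch of that pipeline; HelloLexer, HelloParser, and the start rule greet() are hypothetical placeholders for whatever ANTLR generates from your grammar.

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

public class TokenStreamExample {
    public static void main(String[] args) {
        // Build the lexer from a character stream.
        CharStream input = CharStreams.fromString("hello world");
        HelloLexer lexer = new HelloLexer(input);          // hypothetical generated lexer

        // CommonTokenStream buffers the lexer's tokens; it is the usual TokenStream implementation.
        TokenStream tokens = new CommonTokenStream(lexer);

        // The generated parser pulls tokens from the TokenStream on demand.
        HelloParser parser = new HelloParser(tokens);      // hypothetical generated parser
        ParseTree tree = parser.greet();                   // assumed start rule
        System.out.println(tree.toStringTree(parser));
    }
}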

Project: CTalk    File: App.java
/**
 * @param args the command line arguments
 * @throws java.io.IOException
 * @throws java.net.URISyntaxException
 */
public static void main(String[] args) throws IOException, URISyntaxException {
    final String entryPoint;
    final URL res;
    switch (args.length) {
    case 2:
        res = Paths.get(args[0]).toUri().toURL();
        entryPoint = args[1];
        break;
    default:
        System.err.println("Supply two parameters in the following order:\n- file name of the main function\n- name of the main function\n\nFor example: hello.ct main:argc:argv");
        return;
    }
    final CharStream inp = CharStreams.fromStream(res.openStream());
    final GrammarLexer lex = new GrammarLexer(inp);
    final TokenStream toks = new CommonTokenStream(lex);
    final GrammarParser parser = new GrammarParser(toks);
    System.out.println(new Translator(res.toURI()).generate(parser.program(), entryPoint));
}
Project: coherence-sql    File: SqlParser.java
/**
 * @throws InvalidQueryException if the given query contains invalid characters or is incomplete
 */
public SqlGrammarParser.SelectStatementContext parse(String query) {
    log.trace("About to parse [{}]", query);
    long now = currentTimeMillis();

    CharStream input = CharStreams.fromString(query);
    SqlGrammarLexer lexer = new SqlGrammarLexer(input);
    TokenStream tokenStream = new CommonTokenStream(lexer);
    SqlGrammarParser parser = new SqlGrammarParser(tokenStream);

    parser.removeErrorListeners();
    parser.addErrorListener(new RaiseExceptionErrorListener());

    SqlGrammarParser.SelectStatementContext result = parser.selectStatement();

    log.trace("Successfully parsed [{}] into [{}] in [{}ms]",
            query, result.toStringTree(parser), currentTimeMillis() - now);

    return result;
}
Project: rapidminer    File: CapitulatingErrorStrategy.java
@Override
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
    // change error message from default implementation
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == Token.EOF) {
            input = "the end";
        } else {
            input = escapeWSAndQuote(tokens.getText(e.getStartToken(), e.getOffendingToken()));
        }
    } else {
        input = escapeWSAndQuote("<unknown input>");
    }
    String msg = "inadmissible input at " + input;
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
Project: ts-swift-transpiler    File: SwiftSupport.java
/**
 "If an operator has whitespace on the right side only, it is treated as a
 postfix unary operator. As an example, the ++ operator in a++ b is treated
 as a postfix unary operator."
 "If an operator has no whitespace on the left but is followed immediately
 by a dot (.), it is treated as a postfix unary operator. As an example,
 the ++ operator in a++.b is treated as a postfix unary operator (a++ .b
 rather than a ++ .b)."
 */
public static boolean isPostfixOp(TokenStream tokens) {
    int stop = getLastOpTokenIndex(tokens);
    if ( stop==-1 ) return false;

    int start = tokens.index();
    Token prevToken = tokens.get(start-1); // includes hidden-channel tokens
    Token nextToken = tokens.get(stop+1);
    boolean prevIsWS = isLeftOperatorWS(prevToken);
    boolean nextIsWS = isRightOperatorWS(nextToken);
    boolean result =
        !prevIsWS && nextIsWS ||
        !prevIsWS && nextToken.getType()==SwiftParser.DOT;
    String text = tokens.getText(Interval.of(start, stop));
    //System.out.println("isPostfixOp: '"+prevToken+"','"+text+"','"+nextToken+"' is "+result);
    return result;
}
Project: rest-modeling-framework    File: TypeExpressionConstructor.java
public EObject parse(final String typeExpression, final Scope scope) {
    final CharStream charStream = CharStreams.fromString(typeExpression);
    final TypeExpressionLexer lexer = new TypeExpressionLexer(charStream);
    final TokenStream tokenStream = new CommonTokenStream(lexer);
    final TypeExpressionParser typeExpressionParser = new TypeExpressionParser(tokenStream);

    lexer.removeErrorListeners();
    typeExpressionParser.removeErrorListeners();

    final ParserErrorCollector errorCollector = new ParserErrorCollector();
    lexer.addErrorListener(errorCollector);
    typeExpressionParser.addErrorListener(errorCollector);

    final TypeExpressionParser.Type_exprContext typeExpr = typeExpressionParser.type_expr();

    final EObject anyType = new TypeExpressionBuilder(scope, ARRAY_TYPE).visit(typeExpr);
    scope.getResource().getErrors().addAll(errorCollector.getErrors());

    return anyType;
}
Project: rest-modeling-framework    File: RamlResource.java
@Override
protected void doLoad(final InputStream inputStream, final Map<?, ?> options) throws IOException {
    final BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
    final Optional<AbstractConstructor> optionalRootConstructor = getRootConstructor(bufferedInputStream);

    if (optionalRootConstructor.isPresent()) {
        final AbstractConstructor rootConstructor = optionalRootConstructor.get();
        final RAMLCustomLexer lexer = new RAMLCustomLexer(uri, getURIConverter());
        final TokenStream tokenStream = new CommonTokenStream(lexer);
        final RAMLParser parser = new RAMLParser(tokenStream);

        parser.removeErrorListeners();
        final ParserErrorCollector errorCollector = new ParserErrorCollector();
        parser.addErrorListener(errorCollector);

        try {
            rootConstructor.construct(parser, resourceScope);
            validate();
        } catch (final Exception e) {
            getErrors().addAll(errorCollector.getErrors());
            throw e;
        }
    }
}
Project: rest-modeling-framework    File: TypeDeclarationResolverTest.java
@Ignore
@Test
public void api() {
    final File apiFile = new File("/Users/mkoester/Development/commercetools-api-reference/api.raml");
    assumeTrue(apiFile.exists());

    final URI apiUri = URI.createURI(apiFile.toURI().toString());
    final URIConverter uriConverter = new RamlResourceSet().getURIConverter();
    final RAMLCustomLexer lexer = new RAMLCustomLexer(apiUri, uriConverter);
    final TokenStream tokenStream = new CommonTokenStream(lexer);

    final RAMLParser parser = new RAMLParser(tokenStream);
    final Resource resource = new RamlResourceSet().createResource(apiUri);

    final Scope scope = Scope.of(resource);
    final TypeDeclarationResolver resolver = new TypeDeclarationResolver();
    resolver.resolve(parser.api(), scope);

    assertThat(resource.getErrors()).isEmpty();
}
Project: beaker-notebook-archive    File: GrammarPredicates.java
public static boolean isClassName(TokenStream _input) {
  try {
    int i=1;
    Token token = _input.LT(i);
    while (token!=null && i < _input.size() && _input.LT(i+1).getType() == GroovyParser.DOT) {
      i = i + 2;
      token = _input.LT(i);
    }
    if(token==null)
      return false;
    // TODO here
    return Character.isUpperCase(Character.codePointAt(token.getText(), 0));
  } catch(Exception e) {
    e.printStackTrace();
  }

  return false;
}
Project: antlr-examples    File: ParserUtil.java
public static <L extends Lexer, P extends Parser> P newParser(
    Function<CharStream, L> lexerFactory,
    Function<TokenStream, P> parserFactory,
    String input,
    boolean useBailErrorStrategy,
    boolean removeErrorListeners) {
  CharStream charStream = new ANTLRInputStream(input);
  L lexer = lexerFactory.apply(charStream);
  if (removeErrorListeners) {
    lexer.removeErrorListeners();
  }

  TokenStream tokenStream = new CommonTokenStream(lexer);
  P parser = parserFactory.apply(tokenStream);
  if (useBailErrorStrategy) {
    parser.setErrorHandler(new BailErrorStrategy());
  }
  if (removeErrorListeners) {
    parser.removeErrorListeners();
  }

  return parser;
}
Project: stvs    File: IntervalParser.java
/**
 * Parse an interval, for example <tt>[1,-]</tt> or <tt>-</tt> (a wildcard) or <tt>[1,4]</tt>.
 * Only fixed values are allowed, no variables.
 *
 * @param intervalAsString the string to be parsed.
 * @return a LowerBoundedInterval as the runtime representation of interval strings.
 * @throws ParseException in case the string doesn't fit the given fixed-interval grammar.
 */
public static LowerBoundedInterval parse(String intervalAsString) throws ParseException {
  CharStream charStream = new ANTLRInputStream(intervalAsString);
  CellExpressionLexer lexer = new CellExpressionLexer(charStream);
  TokenStream tokens = new CommonTokenStream(lexer);
  CellExpressionParser parser = new CellExpressionParser(tokens);
  parser.removeErrorListeners();
  parser.addErrorListener(new ThrowingErrorListener());
  try {
    CellExpressionParser.Fixed_intervalContext ctx = parser.fixed_interval();
    if (ctx == null) {
      throw new ParseException(0, 0, "Expected fixed interval");
    }
    return INSTANCE.visit(ctx);
  } catch (ParseRuntimeException runtimeException) {
    throw runtimeException.getParseException();
  }
}
Project: kalang    File: AstBuilderFactory.java
public static AstBuilder createAstBuilder(CompilationUnit source,TokenStream tokens){
    KalangParser p = new KalangParser(tokens);
    AstBuilder sp = new AstBuilder(source, p);
    p.setErrorHandler(new DefaultErrorStrategy() {

        @Override
        public void reportError(Parser recognizer, RecognitionException e) {
            String msg = AntlrErrorString.exceptionString(recognizer, e);
            Token end = e.getOffendingToken();
            Token start;
            RuleContext ctx = e.getCtx();
            if(ctx instanceof ParserRuleContext){
                start = ((ParserRuleContext) ctx).getStart();
            }else{
                start = end;
            }
            sp.getDiagnosisReporter().report(Diagnosis.Kind.ERROR, msg,start,end);
        }
    });
    return sp;
}
Project: swift-js-transpiler    File: SwiftSupport.java
/**
 "If an operator has whitespace on the right side only, it is treated as a
 postfix unary operator. As an example, the ++ operator in a++ b is treated
 as a postfix unary operator."
 "If an operator has no whitespace on the left but is followed immediately
 by a dot (.), it is treated as a postfix unary operator. As an example,
 the ++ operator in a++.b is treated as a postfix unary operator (a++ .b
 rather than a ++ .b)."
 */
public static boolean isPostfixOp(TokenStream tokens) {
    int stop = getLastOpTokenIndex(tokens);
    if ( stop==-1 ) return false;

    int start = tokens.index();
    Token prevToken = tokens.get(start-1); // includes hidden-channel tokens
    Token nextToken = tokens.get(stop+1);
    boolean prevIsWS = isLeftOperatorWS(prevToken);
    boolean nextIsWS = isRightOperatorWS(nextToken);
    boolean result =
        !prevIsWS && nextIsWS ||
        !prevIsWS && nextToken.getType()==SwiftParser.DOT;
    String text = tokens.getText(Interval.of(start, stop));
    //System.out.println("isPostfixOp: '"+prevToken+"','"+text+"','"+nextToken+"' is "+result);
    return result;
}
Project: clarpse    File: ClarpseGoCompiler.java
private void compileFiles(List<RawFile> files, OOPSourceCodeModel srcModel, List<String> projectFileTypes) {
    for (RawFile file : files) {
        try {
            CharStream charStream = new ANTLRInputStream(file.content());
            GolangLexer lexer = new GolangLexer(charStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            GolangParser parser = new GolangParser(tokens);
            // Configure the error strategy and prediction mode before invoking the start rule;
            // setting them after the parse has no effect.
            parser.setErrorHandler(new BailErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            SourceFileContext sourceFileContext = parser.sourceFile();
            ParseTreeWalker walker = new ParseTreeWalker();
            GolangBaseListener listener = new GoLangTreeListener(srcModel, projectFileTypes, file);
            walker.walk(listener, sourceFileContext);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Project: carbon-gateway-framework    File: WUMLSyntaxTest.java
private SyntaxErrorListener getSyntaxTreeErrors(String iFlowResource) {
    InputStream inputStream = null;

    try {
        File file = new File(getClass().getResource(iFlowResource).getFile());
        inputStream = new FileInputStream(file);

        CharStream cs = new ANTLRInputStream(inputStream);
        TokenStream tokenStream = new CommonTokenStream(new WUMLLexer(cs));
        WUMLParser parser = new WUMLParser(tokenStream);
        SyntaxErrorListener errorListener = new SyntaxErrorListener();
        parser.addErrorListener(errorListener);
        ParseTree tree = parser.sourceFile();
        return errorListener;

    } catch (Exception e) {
        return null;
    }
}
Project: RankPL    File: RankPL.java
public static Program parse(String source) {
    RankPLLexer lexer = new RankPLLexer(new ANTLRInputStream(source));
    TokenStream tokens = new CommonTokenStream(lexer);
    RankPLParser parser = new RankPLParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    ConcreteParser classVisitor = new ConcreteParser();

    // Parse
    Program program = null;
    try {
        program = (Program) classVisitor.visit(parser.program());
    } catch (ParseCancellationException e) {
        System.out.println("Syntax error");
        lexer = new RankPLLexer(new ANTLRInputStream(source));
        tokens = new CommonTokenStream(lexer);
        parser = new RankPLParser(tokens);
        classVisitor = new ConcreteParser();
        try {
            program = (Program) classVisitor.visit(parser.program());
        } catch (Exception ex) {
            // Ignore
        }
        return null;
    }
    return program;
}
Project: mdetect    File: ParseTreeDOMSerializer.java
public ParseTreeDOMSerializer(List<String> ruleNames, Map<Integer, String> invTokenMap, TokenStream tokenStream) {
    this.tokenStream = tokenStream;
    this.ruleNames = ruleNames;
    this.invTokenMap = invTokenMap;
    nodeStack = new Stack<Element>();

    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    Document document = null;
    try {
        DocumentBuilder builder = factory.newDocumentBuilder();
        document = builder.newDocument();
        Element root = document.createElement("ast");
        document.appendChild(root);
        nodeStack.push(root);
        domDoc = document;
    } catch (Exception pce) {
        pce.printStackTrace();
    }
}
Project: mdetect    File: ParseUtils.java
public static Document processFile(String filePath) {
    Pair<Parser, Lexer> pl = parsePHP(filePath);
    PHPParser parser = (PHPParser) pl.a;
    parser.setBuildParseTree(true);
    /* 
     * htmlDocument is the start rule (the top-level rule)
     * for the PHP grammar
     */
    ParserRuleContext tree = parser.htmlDocument();
    List<String> ruleNames = Arrays.asList(parser.getRuleNames());
    Map<Integer, String> invTokenMap = getInvTokenMap(parser);
    TokenStream tokenStream = parser.getTokenStream();
    ParseTreeDOMSerializer ptSerializer = new ParseTreeDOMSerializer(ruleNames, invTokenMap, tokenStream);
    ParseTreeWalker.DEFAULT.walk(ptSerializer, tree);
    Document result = ptSerializer.getDOMDocument();
    return result;
}
Project: ksql    File: KsqlParserErrorStrategy.java
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
  TokenStream tokens = recognizer.getInputStream();
  String input;
  if (tokens != null) {
    if (e.getStartToken().getType() == -1) {
      input = "<EOF>";
    } else {
      input = tokens.getText(e.getStartToken(), e.getOffendingToken());
    }
  } else {
    input = "<unknown input>";
  }

  String msg = "no viable alternative at input " + this.escapeWSAndQuote(input);
  recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
Project: rapidminer-studio    File: CapitulatingErrorStrategy.java
@Override
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
    // change error message from default implementation
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == Token.EOF) {
            input = "the end";
        } else {
            input = escapeWSAndQuote(tokens.getText(e.getStartToken(), e.getOffendingToken()));
        }
    } else {
        input = escapeWSAndQuote("<unknown input>");
    }
    String msg = "inadmissible input at " + input;
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
Project: MPL    File: AutoCompletion.java
public static @Nullable AutoCompletionContext getContext(int index, String text) {
  ANTLRInputStream input = new ANTLRInputStream(text);
  MplLexer lexer = new MplLexer(input);
  lexer.removeErrorListeners();
  TokenStream tokens = new CommonTokenStream(lexer);
  MplParser parser = new MplParser(tokens);
  parser.removeErrorListeners();
  FileContext ctx = parser.file();

  AutoCompletionListener listener = new AutoCompletionListener(index);
  try {
    new ParseTreeWalker().walk(listener, ctx);
  } catch (ResultException earlyExit) {
    return earlyExit.getResult();
  }
  return null;
}
Project: transportlanguage    File: OTLDListener.java
/**
 * Parses the supplied input, walks the resulting parse tree with an OTLDListener,
 * and returns that listener (or one carrying the collected syntax errors).
 * @param reader input to parse
 * @return the walked OTLDListener
 * @throws IOException if the input cannot be read
 */
public static OTLDListener parseFile(InputStream reader) throws IOException {
    OTLDErrorListener errorListener = new OTLDErrorListener();
    ANTLRInputStream stream = new ANTLRInputStream(reader);

    Lexer lexer = new otldLexer(stream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    TokenStream tokens = new CommonTokenStream(lexer);

    otldParser parser = new otldParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    ParseTree tree = parser.program();

    OTLDListener railroad = new OTLDListener();

    if (errorListener.getErrors().isEmpty()) {
        ParseTreeWalker walker = new ParseTreeWalker();
        walker.walk(railroad, tree);
    } else {
        railroad.errors.addAll(errorListener.getErrors());
    }

    return railroad;
}
Project: StreamCQL    File: CQLErrorStrategy.java
/**
 * {@inheritDoc}
 */
@Override
public void reportNoViableAlternative(@NotNull Parser recognizer, @NotNull NoViableAltException e)
{
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens instanceof TokenStream)
    {
        if (e.getStartToken().getType() == Token.EOF)
            input = "<EOF>";
        else
            input = getText(tokens, e.getStartToken(), e.getOffendingToken());
    }
    else
    {
        input = "<unknown input>";
    }
    String msg = "no viable alternative at input " + escapeWSAndQuote(input);
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
Project: StreamCQL    File: CQLErrorStrategy.java
@NotNull
private String getText(TokenStream tokens, Interval interval)
{
    int start = interval.a;
    int stop = interval.b;
    if (start < 0 || stop < 0)
        return "";

    if (stop >= tokens.size())
        stop = tokens.size() - 1;

    StringBuilder buf = new StringBuilder();
    for (int i = start; i <= stop; i++)
    {
        Token t = tokens.get(i);
        if (t.getType() == Token.EOF)
            break;
        buf.append(t.getText());
        if (i != stop)
        {
            buf.append(" ");
        }
    }
    return buf.toString();
}
Project: MiniJCompiler    File: MiniJParserTest.java
@Test
public void testParseWorkingExamples() throws IOException {
    FileVisitor<Path> workingFilesVisitior = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            System.out.println("Testing parser input from file \""+file.toString()+"\"");
            ANTLRFileStream antlrStream = new ANTLRFileStream(file.toString());
            MiniJLexer lexer = new MiniJLexer(antlrStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            MiniJParser parser = new MiniJParser(tokens);
            parser.setErrorHandler(new BailErrorStrategy());
            parser.prog();
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(EXAMPLE_PROGRAM_PATH_WORKING, workingFilesVisitior);
}
Project: MiniJCompiler    File: MiniJParserTest.java
@Test
public void testParseFailingExamples() throws IOException {
    FileVisitor<Path> workingFilesVisitior = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            System.out.println("Testing parser input from file \""+file.toString()+"\"");
            ANTLRFileStream antlrStream = new ANTLRFileStream(file.toString());
            MiniJLexer lexer = new MiniJLexer(antlrStream);
            TokenStream tokens = new CommonTokenStream(lexer);
            MiniJParser parser = new MiniJParser(tokens);
            parser.setErrorHandler(new BailErrorStrategy());
            /*
             * Catch all exceptions first, to ensure that every single
             * compilation unit exits with an Exception. Otherwise, this
             * method will return after the first piece of code.
             */
            try {
                parser.prog();
                fail("The example "+file.toString()+" should have failed, but was accepted by the parser.");
            } catch (ParseCancellationException e) {
            }
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(EXAMPLE_PROGRAM_PATH_FAILING, workingFilesVisitior);
}
Project: MiniJCompiler    File: TypeCheckVisitorTest.java
@Test
public void testVisitTypeErrorExamples() throws Exception {
    FileVisitor<Path> failingFilesVisitior = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            if (file.toString().endsWith("LinkedListBUG.java")) {
                return super.visitFile(file, attrs);
            }
            System.out.println("Testing type checker with file \""+file.toString()+"\"");
            ANTLRFileStream reader = new ANTLRFileStream(file.toString());
            MiniJLexer lexer = new MiniJLexer((CharStream) reader);
            TokenStream tokens = new CommonTokenStream(lexer);
            MiniJParser parser = new MiniJParser(tokens);
            ParseTree parseTree = parser.prog();
            ASTVisitor astVisitor = new ASTVisitor();
            Program ast = (Program) astVisitor.visit(parseTree);
            TypeInferenceVisitor typeInferenceVisitor = new TypeInferenceVisitor();
            ast.accept(typeInferenceVisitor);
            TypeCheckVisitor visitor = new TypeCheckVisitor();
            boolean typesCorrect = ast.accept(visitor);
            assertFalse("\"" + file.toString() + "\" passed type check but it shouldn't", typesCorrect);
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(EXAMPLE_PROGRAM_PATH_FAILING, failingFilesVisitior);
}
Project: metron    File: BaseStellarProcessor.java
/**
 * Parses the given Stellar expression, {@code rule}, and compiles it.
 * @param rule The Stellar expression to parse and compile.
 * @return The compiled Expression, which can be re-evaluated in different Contexts and Resolvers without reparsing.
 */
public static StellarCompiler.Expression compile(final String rule) {
  if (rule == null || isEmpty(rule.trim())) {
    return null;
  }

  ANTLRInputStream input = new ANTLRInputStream(rule);
  StellarLexer lexer = new StellarLexer(input);
  lexer.removeErrorListeners();
  lexer.addErrorListener(new ErrorListener());
  TokenStream tokens = new CommonTokenStream(lexer);
  StellarParser parser = new StellarParser(tokens);

  StellarCompiler treeBuilder = new StellarCompiler(
      ArithmeticEvaluator.INSTANCE,
      NumberLiteralEvaluator.INSTANCE,
      ComparisonExpressionWithOperatorEvaluator.INSTANCE
  );
  parser.addParseListener(treeBuilder);
  parser.removeErrorListeners();
  parser.addErrorListener(new ErrorListener());
  parser.transformation();
  return treeBuilder.getExpression();
}
Project: Q-Programming-Language    File: Main.java
@Override
public String visitIncludeFile(QPLParser.IncludeFileContext ctx) {
    String filePath = ctx.path.getText().trim();
    filePath = filePath.substring(1, filePath.length() - 1);
    if (filePath.charAt(0) != '/') {
        filePath = Paths.get(__FILE_PATH__, filePath).toString();
    }
    try {
        ANTLRInputStream ais = new ANTLRFileStream(filePath);
        QPLLexer lex = new QPLLexer(ais);
        TokenStream toks = new CommonTokenStream(lex);
        QPLParser parse = new QPLParser(toks);
        ParseTree tree = parse.prog();
        return new ImpVisitor(filePath).visit(tree);
    } catch (IOException ex) {
        System.err.println(filePath + " cannot be found! Ignoring");
        return "";
    }
}
Project: goworks    File: GoParserFactory.java
@Override
public int adaptivePredict(TokenStream input, int decision, ParserRuleContext outerContext) {
    if (decision == QID_DECISION && QID_DECISION >= 0) {
        if (input.LA(1) == GoParser.IDENTIFIER) {
            if (input.LA(2) == GoParser.Dot) {
                if (input.LA(3) == GoParser.IDENTIFIER) {
                    return qidPredicate.eval(parser, outerContext) ? 1 : 2;
                } else {
                    assert input.LA(3) != CaretToken.CARET_TOKEN_TYPE;
                    return 2;
                }
            } else {
                assert input.LA(2) != CaretToken.CARET_TOKEN_TYPE;
                return 2;
            }
        }
    }

    return super.adaptivePredict(input, decision, outerContext);
}
Project: goworks    File: ParserFactory.java
@NonNull
public CodeCompletionGoParser getParser(@NonNull TokenStream input) {
    CodeCompletionGoParser parser = createParser(input);

    parser.removeErrorListeners();
    parser.setBuildParseTree(false);
    parser.setErrorHandler(new DefaultErrorStrategy());
    parser.getInterpreter().setPredictionMode(PredictionMode.LL);
    parser.getInterpreter().force_global_context = false;
    parser.getInterpreter().always_try_local_context = true;

    parser.setCheckPackageNames(false);
    parser.setPackageNames(Collections.<String>emptyList());

    return parser;
}
Project: goworks    File: CompletionParserATNSimulator.java
@Override
public int adaptivePredict(TokenStream input, int decision, ParserRuleContext outerContext) {
    if (decision == QID_DECISION && QID_DECISION >= 0) {
        if (input.LA(1) == GoParser.IDENTIFIER) {
            if (input.LA(2) == GoParser.Dot) {
                if (input.LA(3) == GoParser.IDENTIFIER) {
                    return qidPredicate.eval(parser, outerContext) ? 1 : 2;
                } else if (input.LA(3) != CaretToken.CARET_TOKEN_TYPE) {
                    return 2;
                }
            } else if (input.LA(2) != CaretToken.CARET_TOKEN_TYPE) {
                return 2;
            }
        }
    }

    return super.adaptivePredict(input, decision, outerContext);
}
Project: goworks    File: AbstractParserCache.java
public T getParser(TokenStream input) {
    T parser = null;
    synchronized (parsers) {
        while (parser == null && !parsers.isEmpty()) {
            parser = parsers.poll().get();
        }
    }

    if (parser != null) {
        parser.setInputStream(input);
    } else {
        parser = createParser(input);
    }

    return parser;
}
Project: criteria    File: ThrowExceptionErrorListener.java
@Override
public final void syntaxError(final Recognizer<?, ?> recognizer,
    final Object offendingSymbol, final int line,
    final int charPositionInLine, final String msg,
    final RecognitionException e) {
  String input;
  if (recognizer instanceof Lexer) {
    final CharStream cs = ((Lexer) recognizer).getInputStream();
    input = cs.getText(new Interval(0, cs.size()));
  } else if (recognizer instanceof Parser) {
    final TokenStream tokens = ((Parser) recognizer).getInputStream();
    if (tokens != null) {
      input = tokens.getText();
    } else {
      input = "<unknown input>";
    }
  } else {
    input = "<unknown input>";
  }
  throw new AntlrParseException(input, line, charPositionInLine, msg);
}
Project: pinot    File: Pql2Compiler.java
@Override
public TransformExpressionTree compileToExpressionTree(String expression) {
  CharStream charStream = new ANTLRInputStream(expression);
  PQL2Lexer lexer = new PQL2Lexer(charStream);
  lexer.setTokenFactory(new CommonTokenFactory(true));
  TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
  PQL2Parser parser = new PQL2Parser(tokenStream);
  parser.setErrorHandler(new BailErrorStrategy());

  // Parse
  ParseTree parseTree = parser.expression();

  ParseTreeWalker walker = new ParseTreeWalker();
  Pql2AstListener listener = new Pql2AstListener(expression, _splitInClause);
  walker.walk(listener, parseTree);

  final AstNode rootNode = listener.getRootNode();
  return TransformExpressionTree.buildTree(rootNode);
}
Project: antlr-denter    File: ParserUtils.java
public static <P extends Parser> P getParser(Class<? extends Lexer> lexerClass, Class<P> parserClass, String source) {
  Lexer lexer = getLexer(lexerClass, source);
  TokenStream tokens = new CommonTokenStream(lexer);

  P parser;
  try {
    parser = parserClass.getConstructor(TokenStream.class).newInstance(tokens);
  } catch (Exception e) {
    throw new IllegalArgumentException("couldn't invoke parser constructor", e);
  }
  parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
  parser.removeErrorListeners(); // don't spit to stderr
  parser.addErrorListener(new DiagnosticErrorListener());
  parser.addErrorListener(new AntlrFailureListener());

  return parser;
}
Project: beakerx    File: GrammarPredicates.java
public static boolean isClassName(TokenStream _input) {
  try {
    int i=1;
    Token token = _input.LT(i);
    while (token!=null && i < _input.size() && _input.LT(i+1).getType() == GroovyParser.DOT) {
      i = i + 2;
      token = _input.LT(i);
    }
    if(token==null)
      return false;
    // TODO here
    return Character.isUpperCase(Character.codePointAt(token.getText(), 0));
  } catch(Exception e) {
    e.printStackTrace();
  }

  return false;
}
Project: intellij-plugin-v4    File: ProfilerPanel.java
public Token addDecisionEventHighlighter(PreviewState previewState, MarkupModel markupModel,
                                         DecisionEventInfo info, Color errorStripeColor,
                                         EffectType effectType) {
    TokenStream tokens = previewState.parsingResult.parser.getInputStream();
    Token startToken = tokens.get(info.startIndex);
    Token stopToken = tokens.get(info.stopIndex);
    TextAttributes textAttributes =
        new TextAttributes(JBColor.BLACK, JBColor.WHITE, errorStripeColor,
                           effectType, Font.PLAIN);
    textAttributes.setErrorStripeColor(errorStripeColor);
    final RangeHighlighter rangeHighlighter =
        markupModel.addRangeHighlighter(
            startToken.getStartIndex(), stopToken.getStopIndex()+1,
            HighlighterLayer.ADDITIONAL_SYNTAX, textAttributes,
            HighlighterTargetArea.EXACT_RANGE);
    rangeHighlighter.putUserData(DECISION_EVENT_INFO_KEY, info);
    rangeHighlighter.setErrorStripeMarkColor(errorStripeColor);
    return startToken;
}
Project: antlrworks2    File: AbstractParserCache.java
public T getParser(TokenStream input) {
    T parser = null;
    synchronized (parsers) {
        while (parser == null && !parsers.isEmpty()) {
            parser = parsers.poll().get();
        }
    }

    if (parser != null) {
        parser.setInputStream(input);
    } else {
        parser = createParser(input);
    }

    return parser;
}
Project: bibrarian    File: Bibitem.java
/**
 * Parse the given text into a map of tags.
 * @param text Text to parse
 * @return Map of tags
 * @throws BibSyntaxException If the text cannot be parsed
 */
private static Map<String, String> parse(final String text)
    throws BibSyntaxException {
    final BibLexer lexer = new BibLexer(new ANTLRInputStream(text));
    final TokenStream tokens = new CommonTokenStream(lexer);
    final BibParser parser = new BibParser(tokens);
    final Errors errors = new Errors();
    lexer.addErrorListener(errors);
    parser.addErrorListener(errors);
    final Map<String, String> map;
    try {
        map = parser.tags().map;
    } catch (final RecognitionException ex) {
        throw new BibSyntaxException(ex);
    }
    if (!Iterables.isEmpty(errors)) {
        throw new BibSyntaxException(
            Joiner.on("; ").join(errors)
        );
    }
    return map;
}