Java class org.antlr.v4.runtime.LexerNoViableAltException code examples
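
LexerNoViableAltException is thrown by ANTLR 4's lexer ATN simulator when no token rule matches the input at the current position. Most of the examples below override Lexer.recover(LexerNoViableAltException) so that the lexer bails out on the first lexical error instead of silently dropping a character and continuing. A minimal, self-contained sketch of that pattern (MyGrammarLexer stands in for any ANTLR-generated lexer class and is not taken from the projects below):

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerNoViableAltException;

// Illustrative "bail out" lexer: the default recover() consumes one character and
// keeps lexing, whereas this version surfaces the error to the caller immediately.
public class BailingLexer extends MyGrammarLexer {

    public BailingLexer(CharStream input) {
        super(input);
    }

    @Override
    public void recover(LexerNoViableAltException e) {
        throw new IllegalStateException("lexical error at index " + e.getStartIndex(), e);
    }
}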

Project: elasticsearch_my    File: EnhancedPainlessLexer.java
@Override
public void recover(final LexerNoViableAltException lnvae) {
    final CharStream charStream = lnvae.getInputStream();
    final int startIndex = lnvae.getStartIndex();
    final String text = charStream.getText(Interval.of(startIndex, charStream.index()));

    Location location = new Location(sourceName, _tokenStartCharIndex);
    String message = "unexpected character [" + getErrorDisplay(text) + "].";
    char firstChar = text.charAt(0);
    if ((firstChar == '\'' || firstChar == '"') && text.length() - 2 > 0 && text.charAt(text.length() - 2) == '\\') {
        /* Use a simple heuristic to guess whether the unrecognized characters were meant to be a string
         * but contain a broken escape sequence. If so, add an extra message about valid string escape sequences. */
        message += " The only valid escape sequences in strings starting with [" + firstChar + "] are [\\\\] and [\\"
                + firstChar + "].";
    }
    throw location.createError(new IllegalArgumentException(message, lnvae));
}
Project: Scratch-ApuC    File: LexerATNSimulator.java
protected int failOrAccept(SimState prevAccept, CharStream input,
                           ATNConfigSet reach, int t)
{
    if (prevAccept.dfaState != null) {
        LexerActionExecutor lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;
        accept(input, lexerActionExecutor, startIndex,
            prevAccept.index, prevAccept.line, prevAccept.charPos);
        return prevAccept.dfaState.prediction;
    }
    else {
        // if no accept and EOF is first char, return EOF
        if ( t==IntStream.EOF && input.index()==startIndex ) {
            return Token.EOF;
        }

        throw new LexerNoViableAltException(recog, input, startIndex, reach);
    }
}
Project: intellij-plugin-v4    File: ParsingUtils.java
public static SyntaxError getErrorUnderCursor(java.util.List<SyntaxError> errors, int offset) {
    for (SyntaxError e : errors) {
        int a, b;
        RecognitionException cause = e.getException();
        if ( cause instanceof LexerNoViableAltException) {
            a = ((LexerNoViableAltException) cause).getStartIndex();
            b = ((LexerNoViableAltException) cause).getStartIndex()+1;
        }
        else {
            Token offendingToken = (Token)e.getOffendingSymbol();
            a = offendingToken.getStartIndex();
            b = offendingToken.getStopIndex()+1;
        }
        if ( offset >= a && offset < b ) { // cursor is over some kind of error
            return e;
        }
    }
    return null;
}
Project: freelib-edtf    File: ParserErrorListener.java
public void syntaxError(Recognizer<?, ?> aRecognizer,
        Object aOffendingSymbol, int aLine, int aCharIndex,
        String aMessage, RecognitionException aException) {
    ANTLRErrorStrategy errorHandler = myParser.getErrorHandler();

    if (LOGGER.isWarnEnabled()) {
        LOGGER.warn(aMessage + " [" + aLine + ":" + aCharIndex + "]");
    }

    /*
     * Report the lexer exception through the parser since I don't see a
     * getNumberOfSyntaxErrors() method on the lexer (like in ANTLR 3) and
     * the lexer's errors aren't being reported by the parser's method.
     *
     * I may just be missing the correct way this should be handled(?)
     */
    if (aException instanceof LexerNoViableAltException) {
        NoViableAltException exception = new NoViableAltException(myParser);
        errorHandler.reportError(myParser, exception);
    }
    else {
        errorHandler.reportError(myParser, aException);
    }
}
Project: esper    File: Antlr4ErrorStrategy.java
public void reportError(Parser recognizer, RecognitionException e) {
    // ANTLR has an issue handling LexerNoViableAltException, as the offending token can be null
    // Try: "select a.b('aa\") from A"
    if (e instanceof LexerNoViableAltException && e.getOffendingToken() == null) {
        return;
    }
    super.reportError(recognizer, e);
}
Project: vzome-core    File: ZomicASTCompiler.java
@Override
public void recover(LexerNoViableAltException e) {
    // Bail out of the lexer at the first lexical error instead of trying to recover.
    // Use this in conjunction with BailErrorStrategy.
    // Wrap the LexerNoViableAltException in a RuntimeWrapperException so the lexer
    // doesn't try to handle it itself; the original exception is extracted from the
    // wrapper and re-thrown inside the compile() method below, once we are outside
    // the lexer rules.
    throw new RuntimeWrapperException(e); 
}
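
The unwrapping side referred to in the comment might look roughly like the following sketch; the ZomicLexer/ZomicParser names, the start rule, and the method body are assumptions for illustration, not the actual vzome-core implementation:

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
import org.antlr.v4.runtime.tree.ParseTree;

// Hypothetical sketch of compile(): the RuntimeWrapperException thrown by recover()
// above is caught outside the lexer rules, and the original LexerNoViableAltException
// is extracted and re-thrown without the wrapper.
ParseTree compile(CharStream input) {
    ZomicLexer lexer = new ZomicLexer(input);                        // assumed generated lexer
    ZomicParser parser = new ZomicParser(new CommonTokenStream(lexer));
    try {
        return parser.program();                                     // assumed start rule
    } catch (RuntimeWrapperException wrapper) {
        throw (LexerNoViableAltException) wrapper.getCause();        // re-throw the original exception
    }
}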
Project: intellij-plugin-v4    File: InputPanel.java
public void annotateErrorsInPreviewInputEditor(SyntaxError e) {
    Editor editor = getInputEditor();
    if ( editor==null ) return;
    MarkupModel markupModel = editor.getMarkupModel();

    int a, b; // Start and stop index
    RecognitionException cause = e.getException();
    if ( cause instanceof LexerNoViableAltException ) {
        a = ((LexerNoViableAltException) cause).getStartIndex();
        b = ((LexerNoViableAltException) cause).getStartIndex()+1;
    }
    else {
        Token offendingToken = (Token) e.getOffendingSymbol();
        a = offendingToken.getStartIndex();
        b = offendingToken.getStopIndex()+1;
    }
    final TextAttributes attr = new TextAttributes();
    attr.setForegroundColor(JBColor.RED);
    attr.setEffectColor(JBColor.RED);
    attr.setEffectType(EffectType.WAVE_UNDERSCORE);
    RangeHighlighter highlighter =
        markupModel.addRangeHighlighter(a,
                                        b,
                                        ERROR_LAYER, // layer
                                        attr,
                                        HighlighterTargetArea.EXACT_RANGE);
    highlighter.putUserData(SYNTAX_ERROR, e);
}
Project: rapidminer    File: CapitulatingFunctionExpressionLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    throw new CapitulatingRuntimeException();
}
Project: ontolib    File: OboLexer.java
@Override
public void recover(LexerNoViableAltException e) {
  throw new OntoLibRuntimeException("There was a problem with lexing OBO file.", e); // bail out
}
Project: rainbow    File: DelimiterLexer.java
@Override
public Token nextToken()
{
    if (_input == null) {
        throw new IllegalStateException("nextToken requires a non-null input stream.");
    }

    // Mark start location in char stream so unbuffered streams are
    // guaranteed to have at least the text of the current token
    int tokenStartMarker = _input.mark();
    try {
        outer:
        while (true) {
            if (_hitEOF) {
                emitEOF();
                return _token;
            }

            _token = null;
            _channel = Token.DEFAULT_CHANNEL;
            _tokenStartCharIndex = _input.index();
            _tokenStartCharPositionInLine = getInterpreter().getCharPositionInLine();
            _tokenStartLine = getInterpreter().getLine();
            _text = null;
            do {
                _type = Token.INVALID_TYPE;
                int ttype = -1;

                // This entire method is copied from org.antlr.v4.runtime.Lexer, with the following bit
                // added to match the delimiters before we attempt to match the token
                boolean found = false;
                for (String terminator : delimiters) {
                    if (match(terminator)) {
                        ttype = SqlBaseParser.DELIMITER;
                        found = true;
                        break;
                    }
                }

                if (!found) {
                    try {
                        ttype = getInterpreter().match(_input, _mode);
                    }
                    catch (LexerNoViableAltException e) {
                        notifyListeners(e);        // report error
                        recover(e);
                        ttype = SKIP;
                    }
                }

                if (_input.LA(1) == IntStream.EOF) {
                    _hitEOF = true;
                }
                if (_type == Token.INVALID_TYPE) {
                    _type = ttype;
                }
                if (_type == SKIP) {
                    continue outer;
                }
            }
            while (_type == MORE);
            if (_token == null) {
                emit();
            }
            return _token;
        }
    }
    finally {
        // make sure we release marker after match or
        // unbuffered char stream will keep buffering
        _input.release(tokenStartMarker);
    }
}
Project: boqa    File: OboLexer.java
@Override
public void recover(LexerNoViableAltException e) {
  throw new OntoLibRuntimeException("There was a problem with lexing OBO file.", e); // bail out
}
Project: PhyDyn    File: PopModelBailLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    throw new RuntimeException(e);
}
Project: rapidminer-studio    File: CapitulatingFunctionExpressionLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    throw new CapitulatingRuntimeException();
}
Project: groovy    File: GroovyLangLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    throw e; // if some lexical error occurred, stop parsing!
}
Project: LogicNG    File: PseudoBooleanLexer.java
@Override
public void recover(final LexerNoViableAltException exception) {
  throw new LexerException(exception.getMessage());
}
Project: LogicNG    File: PropositionalLexer.java
@Override
public void recover(final LexerNoViableAltException exception) {
  throw new LexerException(exception.getMessage());
}
Project: presto    File: DelimiterLexer.java
@Override
public Token nextToken()
{
    if (_input == null) {
        throw new IllegalStateException("nextToken requires a non-null input stream.");
    }

    // Mark start location in char stream so unbuffered streams are
    // guaranteed to have at least the text of the current token
    int tokenStartMarker = _input.mark();
    try {
        outer:
        while (true) {
            if (_hitEOF) {
                emitEOF();
                return _token;
            }

            _token = null;
            _channel = Token.DEFAULT_CHANNEL;
            _tokenStartCharIndex = _input.index();
            _tokenStartCharPositionInLine = getInterpreter().getCharPositionInLine();
            _tokenStartLine = getInterpreter().getLine();
            _text = null;
            do {
                _type = Token.INVALID_TYPE;
                int ttype = -1;

                // This entire method is copied from org.antlr.v4.runtime.Lexer, with the following bit
                // added to match the delimiters before we attempt to match the token
                boolean found = false;
                for (String terminator : delimiters) {
                    if (match(terminator)) {
                        ttype = SqlBaseParser.DELIMITER;
                        found = true;
                        break;
                    }
                }

                if (!found) {
                    try {
                        ttype = getInterpreter().match(_input, _mode);
                    }
                    catch (LexerNoViableAltException e) {
                        notifyListeners(e);        // report error
                        recover(e);
                        ttype = SKIP;
                    }
                }

                if (_input.LA(1) == IntStream.EOF) {
                    _hitEOF = true;
                }
                if (_type == Token.INVALID_TYPE) {
                    _type = ttype;
                }
                if (_type == SKIP) {
                    continue outer;
                }
            }
            while (_type == MORE);
            if (_token == null) {
                emit();
            }
            return _token;
        }
    }
    finally {
        // make sure we release marker after match or
        // unbuffered char stream will keep buffering
        _input.release(tokenStartMarker);
    }
}
Project: apple-data    File: SimpleHbaseStatementsLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    log.error(e);
    throw new SimpleHBaseException("lexer error.", e);
}
Project: vzome-core    File: ZomicASTCompiler.java
RuntimeWrapperException(LexerNoViableAltException ex) {
    this.initCause(ex);
}
Project: jannovar    File: HGVSLexer.java
@Override
public void recover(LexerNoViableAltException e) {
    throw new RuntimeException(e); // bail out
}
Project: BigDataScript    File: Bds.java
/**
 * Create an AST from a program (using the ANTLR lexer & parser).
 * Returns null on error.
 * Use 'alreadyIncluded' to keep track of files already pulled in via 'include' statements.
 */
public static ParseTree createAst(File file, boolean debug, Set<String> alreadyIncluded) {
    alreadyIncluded.add(Gpr.getCanonicalFileName(file));
    String fileName = file.toString();
    String filePath = fileName;

    BigDataScriptLexer lexer = null;
    BigDataScriptParser parser = null;

    try {
        filePath = file.getCanonicalPath();

        // Input stream
        if (!Gpr.canRead(filePath)) {
            CompilerMessages.get().addError("Can't read file '" + filePath + "'");
            return null;
        }

        // Create a CharStream that reads from standard input
        ANTLRFileStream input = new ANTLRFileStream(fileName);

        //---
        // Lexer: Create a lexer that feeds off of input CharStream
        //---
        lexer = new BigDataScriptLexer(input) {
            @Override
            public void recover(LexerNoViableAltException e) {
                throw new RuntimeException(e); // Bail out
            }
        };

        //---
        // Parser
        //---
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        parser = new BigDataScriptParser(tokens);

        // Parser error handling
        parser.setErrorHandler(new CompileErrorStrategy()); // Bail out with exception if errors in parser
        parser.addErrorListener(new CompilerErrorListener()); // Catch some other error messages that 'CompileErrorStrategy' fails to catch

        // Begin parsing at main rule
        ParseTree tree = parser.programUnit();

        // Error loading file?
        if (tree == null) {
            System.err.println("Can't parse file '" + filePath + "'");
            return null;
        }

        // Show main nodes
        if (debug) {
            Timer.showStdErr("AST:");
            for (int childNum = 0; childNum < tree.getChildCount(); childNum++) {
                Tree child = tree.getChild(childNum);
                System.err.println("\t\tChild " + childNum + ":\t" + child + "\tTree:'" + child.toStringTree() + "'");
            }
        }

        // Included files
        boolean resolveIncludePending = true;
        while (resolveIncludePending)
            resolveIncludePending = resolveIncludes(tree, debug, alreadyIncluded);

        return tree;
    } catch (Exception e) {
        String msg = e.getMessage();
        CompilerMessages.get().addError("Could not compile " + filePath //
                + (msg != null ? " :" + e.getMessage() : "") //
        );
        return null;
    }
}