Python token 模块,ERRORTOKEN 实例源码

我们从 Python 开源项目中,提取了以下 1 个代码示例,用于说明如何使用 token.ERRORTOKEN。

项目:pyshell    作者:oglops    | 项目源码 | 文件源码
def method_tokenize(self, text):
        """ Tokenize *text* to shortcut completion handling.

            Runs the tokenizer over *text* and reacts to ERRORTOKENs:
            an unterminated quote triggers string/raw-string completion
            help; any other error token aborts with Error.

            NOTE(review): despite the original wording ("return true"),
            this method does not return a value -- it communicates by
            raising Finish/Error from the token eater (both presumably
            defined elsewhere in this file; confirm).

            NOTE(review): uses the legacy Python 2 ``tokenize.tokenize``
            (readline, tokeneater) signature -- this will not run on
            Python 3 unchanged.

            XXX: Could we incorporate trying filename-completions
                 here?
        """
        import token, tokenize

        class TokenEater:
            """ Token receiver callback: an instance is passed as the
                Python 2 ``tokenize.tokenize`` tokeneater function.
            """
            def __init__(self,text,config):
                # text: the source being completed; config: completion
                # configuration (only string_help is read below).
                self.text = text
                self.config = config

            def __call__(self, ttype, ttoken, srowscol, erowecol, line):
                # Invoked once per token: (type, string, (srow, scol),
                # (erow, ecol), logical line).
                srow, scol = srowscol
                erow, ecol = erowecol 
                #debug("got token: %s" % repr(ttoken))
                # Keep a small window of source around the token for the
                # Error message below.
                self.context = '%s' % (line[max(0,scol-1):scol+4])
                if ttype == token.ERRORTOKEN:
                    #debug("found error token: %s" % repr(ttoken))
                    if ttoken.strip():
                        if ttoken in '"\'':
                            # Unterminated quote: pass the last non-space
                            # character before it (e.g. an r/u prefix).
                            self.handle_open_string(line[:scol].rstrip()[-1:])
                        else:
                            # Error is not defined in this block --
                            # presumably a module-level exception; confirm.
                            raise Error('error at %s' % self.context)

            def handle_open_string(self, previous):
                """ raise help completion for open strings """
                #debug("previousstrip: "+repr(previous))
                # `previous` is the single character preceding the open
                # quote: 'r' -> raw string, 'u' -> unicode, else plain.
                if previous == 'r':
                    fin = [['in rawstring, <tab> for regexinfo'],
                           self.config.string_help[:]]
                elif previous == 'u':
                    fin = [['<open unicode string>']]
                else:
                    fin = [['<open string>']]
                # Finish is raised as control flow to hand the completion
                # payload to the caller (defined elsewhere; confirm).
                raise Finish(fin)

        try:
            eater = TokenEater(text, self.config)
            # Python 2 tokenize API: (readline callable, tokeneater).
            # ['',text].pop returns `text` on the first call and '' (EOF)
            # on the second -- a two-line pseudo-file.
            tokenize.tokenize(['',text].pop, eater)

        except tokenize.TokenError:
            # sys.exc_info() instead of `except ... as e` -- old-style
            # compatibility; `sys` and `debug` come from module scope.
            ex = sys.exc_info()[1]
            debug("ex:%s" % ex)
            msg = ex.args[0]
            # TokenError.args is (message, (row, col)); an EOF-in-string
            # message means an unterminated string literal, so offer the
            # open-string completion using the quote char at that column.
            if msg[:3]=='EOF'and msg[-6:]=='string':
                eater.handle_open_string(text[ex.args[1][1]])