
def yapps::runtime::Scanner::token ( self, restrict, context = None )

Scan for another token.

Definition at line 258 of file runtime.py.

    def token(self, restrict, context=None):
        """Scan for another token."""

        while 1:
            if self.stack:
                try:
                    return self.stack.token(restrict, context)
                except StopIteration:
                    self.stack = None

            # Keep looking for a token, ignoring any in self.ignore

            # special handling for end-of-file
            if self.stacked and self.pos==len(self.input):
                raise StopIteration

            # Search the patterns for the longest match, with earlier
            # tokens in the list having preference
            best_match = -1
            best_pat = '(error)'
            best_m = None
            for p, regexp in self.patterns:
                # First check to see if we're ignoring this token
                if restrict and p not in restrict and p not in self.ignore:
                    continue
                m = regexp.match(self.input, self.pos)
                if m and m.end()-m.start() > best_match:
                    # We got a match that's better than the previous one
                    best_pat = p
                    best_match = m.end()-m.start()
                    best_m = m

            # If we didn't find anything, raise an error
            if best_pat == '(error)' and best_match < 0:
                msg = 'Bad Token'
                if restrict:
                    msg = 'Trying to find one of '+', '.join(restrict)
                raise SyntaxError(self.get_pos(), msg, context=context)

            ignore = best_pat in self.ignore
            value = self.input[self.pos:self.pos+best_match]
            if not ignore:
                tok=Token(type=best_pat, value=value, pos=self.get_pos())

            self.pos += best_match

            npos = value.rfind("\n")
            if npos > -1:
                self.col = best_match-npos
                self.line += value.count("\n")
            else:
                self.col += best_match

            # If we found something that isn't to be ignored, return it
            if not ignore:
                if len(self.tokens) >= 10:
                    del self.tokens[0]
                self.tokens.append(tok)
                self.last_read_token = tok
                # print repr(tok)
                return tok
            else:
                ignore = self.ignore[best_pat]
                if ignore:
                    ignore(self, best_m)
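A minimal usage sketch, shown only for orientation: it assumes the classic yapps2 constructor Scanner(patterns, ignore, input), where patterns is a list of (name, regex) pairs and ignore is a dict mapping ignored token names to an optional callback (the callback signature follows the ignore(self, best_m) call above). The token names, regexes, callback, and input text below are hypothetical, not part of runtime.py.

    from yapps import runtime

    # Hypothetical whitespace callback; token() invokes it as
    # callback(scanner, match) for each ignored 'WS' match it skips.
    skipped = []
    def note_ws(scanner, match):
        skipped.append(match.group(0))

    patterns = [
        ('NUM',  r'[0-9]+'),
        ('PLUS', r'\+'),
        ('WS',   r'\s+'),
    ]
    s = runtime.Scanner(patterns, {'WS': note_ws}, "12 + 34")

    # Each call returns the next non-ignored Token whose type is in restrict.
    print(s.token(['NUM']))     # a NUM Token for "12"
    print(s.token(['PLUS']))    # a PLUS Token for "+"
    print(s.token(['NUM']))     # a NUM Token for "34"
    print(skipped)              # the whitespace runs seen by note_ws

Because best_match uses a strict greater-than comparison, a pattern earlier in the list wins when two patterns match text of the same length; restrict only limits which non-ignored patterns are tried, so ignored tokens are always skipped over.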
