text
stringlengths 0
828
|
|---|
900,"def next(self, *expectation, **kwargs):
|
""""""
|
Parses the next token from the input and returns it. The new token can be
|
accessed from the #token attribute after the method was called.
|
If one or more arguments are specified, they must be rule names that are to
|
be expected at the current position. They will be attempted to be matched
|
first (in the specified order). If the expectation could not be met, an
|
#UnexpectedTokenError is raised.
|
An expected Token will not be skipped, even if its rule defines it so.
|
# Arguments
|
expectation (str): The name of one or more rules that are expected from the
|
current position of the parser. If empty, the first matching token of ALL
|
rules will be returned. In this case, skippable tokens will be skipped.
|
as_accept (bool): If passed True, this method behaves the same as the
|
#accept() method. The default value is #False.
|
weighted (bool): If passed True, the tokens specified with *expectations*
|
are checked first, effectively giving them a higher priority than
|
they would have from the order in the #rules list. The default value is
|
#False.
|
# Raises
|
ValueError: if an expectation doesn't match with a rule name.
|
UnexpectedTokenError: if an expectation is given and the expectation
|
wasn't fulfilled. Only when *as_accept* is set to #False.
|
TokenizationError: if a token could not be generated from the current
|
position of the Scanner.
|
""""""
|
as_accept = kwargs.pop('as_accept', False)
|
weighted = kwargs.pop('weighted', False)
|
for key in kwargs:
|
raise TypeError('unexpected keyword argument {0!r}'.format(key))
|
if self.token and self.token.type == eof:
|
if not as_accept and expectation and eof not in expectation:
|
raise UnexpectedTokenError(expectation, self.token)
|
elif as_accept and eof in expectation:
|
return self.token
|
elif as_accept:
|
return None
|
return self.token
|
token = None
|
while token is None:
|
# Stop if we reached the end of the input.
|
cursor = self.scanner.cursor
|
if not self.scanner:
|
token = Token(eof, cursor, None, None)
|
break
|
value = None
|
# Try to match the expected tokens.
|
if weighted:
|
for rule_name in expectation:
|
if rule_name == eof:
|
continue
|
rules = self.rules_map.get(rule_name)
|
if rules is None:
|
raise ValueError('unknown rule', rule_name)
|
for rule in rules:
|
value = rule.tokenize(self.scanner)
|
if value:
|
break
|
if value:
|
break
|
self.scanner.restore(cursor)
|
# Match the rest of the rules, but only if we're not acting
|
# like the accept() method that doesn't need the next token
|
# for raising an UnexpectedTokenError.
|
if not value:
|
if as_accept and weighted:
|
# Check only skippable rules if we're only trying to accept
|
# a certain token type and may consume any skippable tokens
|
# until then.
|
check_rules = self.skippable_rules
|
else:
|
check_rules = self.rules
|
for rule in check_rules:
|
if weighted and expectation and rule.name in expectation:
|
# Skip rules that we already tried.
|
continue
|
value = rule.tokenize(self.scanner)
|
if value:
|
break
|
self.scanner.restore(cursor)
|
if not value:
|
if as_accept:
|
return None
|
token = Token(None, cursor, self.scanner.char, None)
|
else:
|
assert rule, ""we should have a rule by now""
|
if type(value) is not Token:
|
if isinstance(value, tuple):
|
value, string_repr = value
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.