Closed tigerfoot closed 5 years ago
After debugging it further, I've got a clearer stack trace:
_________________________________________________________________________ test_missing_end __________________________________________________________________________
token = Token($END, '')
def get_action(token):
state = state_stack[-1]
try:
> return states[state][token.type]
E KeyError: '$END'
/usr/lib/python3.7/site-packages/lark/parsers/lalr_parser.py:46: KeyError
During handling of the above exception, another exception occurred:
def test_missing_end():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
> print(p.parse(s))
tests/test_errors.py:79:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <mappyfile.parser.Parser object at 0x7eff6208ba20>, text = 'MAP\nLAYER\nNAME "Test"\nLAYER\nNAME "Test2"\nEND\nEND', fn = None
def parse(self, text, fn=None):
"""
Parse the Mapfile
"""
text = str(text)
if self.expand_includes:
text = self.load_includes(text, fn=fn)
try:
self._comments[:] = [] # clear any comments from a previous parse
> tree = self.lalr.parse(text)
mappyfile/parser.py:195:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = Lark(open('<string>'), parser='lalr', lexer='contextual', ...), text = 'MAP\nLAYER\nNAME "Test"\nLAYER\nNAME "Test2"\nEND\nEND'
def parse(self, text):
"Parse the given text, according to the options provided. Returns a tree, unless specified otherwise."
> return self.parser.parse(text)
/usr/lib/python3.7/site-packages/lark/lark.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lark.parser_frontends.LALR_ContextualLexer object at 0x7eff61fde5f8>, text = 'MAP\nLAYER\nNAME "Test"\nLAYER\nNAME "Test2"\nEND\nEND'
def parse(self, text):
token_stream = self.lex(text)
sps = self.lexer.set_parser_state
> return self.parser.parse(token_stream, *[sps] if sps is not NotImplemented else [])
/usr/lib/python3.7/site-packages/lark/parser_frontends.py:38:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lark.parsers.lalr_parser._Parser object at 0x7eff61d98518>, seq = <generator object ContextualLexer.lex at 0x7eff61e57a20>
set_state = <bound method ContextualLexer.set_parser_state of <lark.lexer.ContextualLexer object at 0x7eff61e16a20>>
def parse(self, seq, set_state=None):
token = None
stream = iter(seq)
states = self.states
state_stack = [self.start_state]
value_stack = []
if set_state: set_state(self.start_state)
def get_action(token):
state = state_stack[-1]
try:
return states[state][token.type]
except KeyError:
expected = [s for s in states[state].keys() if s.isupper()]
raise UnexpectedToken(token, expected, state=state)
def reduce(rule):
size = len(rule.expansion)
if size:
s = value_stack[-size:]
del state_stack[-size:]
del value_stack[-size:]
else:
s = []
value = self.callbacks[rule](s)
_action, new_state = states[state_stack[-1]][rule.origin.name]
assert _action is Shift
state_stack.append(new_state)
value_stack.append(value)
# Main LALR-parser loop
for token in stream:
while True:
action, arg = get_action(token)
assert arg != self.end_state
if action is Shift:
state_stack.append(arg)
value_stack.append(token)
if set_state: set_state(arg)
break # next token
else:
reduce(arg)
token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1)
while True:
> _action, arg = get_action(token)
/usr/lib/python3.7/site-packages/lark/parsers/lalr_parser.py:83:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
token = Token($END, '')
def get_action(token):
state = state_stack[-1]
try:
return states[state][token.type]
except KeyError:
expected = [s for s in states[state].keys() if s.isupper()]
> raise UnexpectedToken(token, expected, state=state)
E lark.exceptions.UnexpectedToken: Unexpected token Token($END, '') at line 7, column 1.
E Expected one of:
E * STYLE
E * MAP
E * SCALEBAR
E * CLUSTER
E * FEATURE
E * SCALETOKEN
E * JOIN
E * METADATA
E * QUERYMAP
E * LEADER
E * _END
E * CLASS
E * VALUES
E * GRID
E * OUTPUTFORMAT
E * LABEL
E * COMPOSITE
E * VALIDATION
E * POINTS
E * WEB
E * REFERENCE
E * LEGEND
E * CONFIG
E * PROJECTION
E * UNQUOTED_STRING
E * SYMBOL
E * PATTERN
E * LAYER
/usr/lib/python3.7/site-packages/lark/parsers/lalr_parser.py:49: UnexpectedToken
During handling of the above exception, another exception occurred:
def test_missing_end():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
print(p.parse(s))
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 1)
> assert(str(ex.token) == 'END')
E AssertionError: assert '' == 'END'
E + END
tests/test_errors.py:85: AssertionError
----------------------------------------------------------------------- Captured stdout call ------------------------------------------------------------------------
{'token': Token($END, ''), 'expected': ['STYLE', 'MAP', 'SCALEBAR', 'CLUSTER', 'FEATURE', 'SCALETOKEN', 'JOIN', 'METADATA', 'QUERYMAP', 'LEADER', '_END', 'CLASS', '}
------------------------------------------------------------------------- Captured log call -------------------------------------------------------------------------
parser.py 203 ERROR Parsing of Mapfile unsuccessful
I'm not an expert in the Python world, but applying the following patch fixes the failure:
--- mappyfile-0.7.5/tests/test_errors.py 2018-09-14 00:18:18.000000000 +0200
+++ mappyfile/tests/test_errors.py 2019-02-10 14:41:24.410491271 +0100
@@ -81,7 +81,7 @@
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 1)
- assert(str(ex.token) == 'END')
+ assert(str(ex.token) == '')
@pytest.mark.xfail
@@ -101,7 +101,7 @@
def run_tests():
- """
+ r"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
You should change
from lark.lexer import UnexpectedInput
to
from lark import UnexpectedInput
or maybe
from lark import UnexpectedCharacters
The assertion error suggests the test needs to be updated to expect the `$END` token instead of `END`.
Sorry! Didn't expect these internal changes to break anything.
@tigerfoot - thanks for spotting this and the debugging.
@erezsh - thanks for your input. I updated the assert check to assert(ex.token.type == "$END")
I was already using from lark import UnexpectedInput
so I don't think any changes related to this broke anything.
Even if lark-parser is pinned in setup.py, in the distribution world we will have to build against the latest version; lark-parser is now at 0.6.6.
On my dev platform, with Python 3.7.2, running the tests failed.