leolani / cltl-chatbots

Series of chatbots that demonstrate Leolani’s functionalities
Apache License 2.0

`JSONDecodeError: Expecting value: line 7 column 1 (char 6)` #11

Closed: tae898 closed this issue 2 years ago

tae898 commented 2 years ago
/home/tk/.virtualenvs/leolani/lib/python3.8/site-packages/rdflib_jsonld/__init__.py:9: DeprecationWarning: The rdflib-jsonld package has been integrated into rdflib as of rdflib==6.0.1.  Please remove rdflib-jsonld from your project's dependencies.
  warnings.warn(
[nltk_data] Downloading package punkt to /home/tk/nltk_data...
[nltk_data]   Package punkt is already up-to-date!
---------------------------------------------------------------------------
JSONDecodeError                           Traceback (most recent call last)
/home/tk/repos/cltl-chatbots/src/notebooks/lets-chat_with_a_friend_and_a_brain.ipynb Cell 3' in <module>
      2 import emissor as em
      3 from cltl import brain
----> 4 from cltl.triple_extraction.api import Chat, UtteranceHypothesis
      5 from emissor.persistence import ScenarioStorage
      6 from emissor.representation.annotation import AnnotationType, Token, NER

File ~/.virtualenvs/leolani/lib/python3.8/site-packages/cltl/triple_extraction/api.py:15, in <module>
     13 from cltl.combot.backend.api.discrete import UtteranceType
     14 from cltl.combot.backend.utils.casefolding import casefold_text
---> 15 from cltl.triple_extraction.analyzer import Analyzer
     16 from cltl.triple_extraction.data.base_cases import friends
     17 from cltl.triple_extraction.ner import NER

File ~/.virtualenvs/leolani/lib/python3.8/site-packages/cltl/triple_extraction/analyzer.py:4, in <module>
      1 from cltl.combot.backend.api.discrete import UtteranceType, Emotion
      3 from cltl.triple_extraction.ner import NER
----> 4 from cltl.triple_extraction.utils.helper_functions import *
      7 class Analyzer(object):
      8     # Load Grammar Json
      9     GRAMMAR_JSON = os.path.join(os.path.dirname(__file__), 'data', 'lexicon.json')

File ~/.virtualenvs/leolani/lib/python3.8/site-packages/cltl/triple_extraction/utils/helper_functions.py:19, in <module>
     16 wnl = WordNetLemmatizer()
     18 ROOT = os.path.join(os.path.dirname(__file__), '..')
---> 19 lexicon = json.load(open(os.path.join(ROOT, 'data', 'lexicon.json')))
     22 def trim_dash(triple):
     23     """
     24     :param triple: a set with three elements (subject, predicate, object)
     25     :return: clean triple with extra dashes removed
     26     """

File /usr/local/lib/python3.8/json/__init__.py:293, in load(fp, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
    274 def load(fp, *, cls=None, object_hook=None, parse_float=None,
    275         parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    276     """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    277     a JSON document) to a Python object.
    278 
   (...)
    291     kwarg; otherwise ``JSONDecoder`` is used.
    292     """
--> 293     return loads(fp.read(),
    294         cls=cls, object_hook=object_hook,
    295         parse_float=parse_float, parse_int=parse_int,
    296         parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)

File /usr/local/lib/python3.8/json/__init__.py:357, in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
    352     del kw['encoding']
    354 if (cls is None and object_hook is None and
    355         parse_int is None and parse_float is None and
    356         parse_constant is None and object_pairs_hook is None and not kw):
--> 357     return _default_decoder.decode(s)
    358 if cls is None:
    359     cls = JSONDecoder

File /usr/local/lib/python3.8/json/decoder.py:337, in JSONDecoder.decode(self, s, _w)
    332 def decode(self, s, _w=WHITESPACE.match):
    333     """Return the Python representation of ``s`` (a ``str`` instance
    334     containing a JSON document).
    335 
    336     """
--> 337     obj, end = self.raw_decode(s, idx=_w(s, 0).end())
    338     end = _w(s, end).end()
    339     if end != len(s):

File /usr/local/lib/python3.8/json/decoder.py:355, in JSONDecoder.raw_decode(self, s, idx)
    353     obj, end = self.scan_once(s, idx)
    354 except StopIteration as err:
--> 355     raise JSONDecodeError("Expecting value", s, err.value) from None
    356 return obj, end

JSONDecodeError: Expecting value: line 7 column 1 (char 6)
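
The error is raised while `helper_functions.py` loads the `lexicon.json` that ships with the installed package. A quick way to confirm whether that file is what the JSON parser is choking on is the minimal diagnostic sketch below (my own addition, not part of the notebook); it locates the installed `lexicon.json` without importing `cltl.triple_extraction.utils.helper_functions`, since that import is exactly what fails.

```python
# Diagnostic sketch: find the installed lexicon.json and try to parse it,
# to see whether the file is empty, truncated, or not valid JSON.
import importlib.util
import json
import os

# Locate the cltl.triple_extraction package without importing its modules,
# because importing them triggers the JSONDecodeError shown above.
spec = importlib.util.find_spec("cltl.triple_extraction")
pkg_dir = (os.path.dirname(spec.origin)
           if spec.origin
           else next(iter(spec.submodule_search_locations)))
lexicon_path = os.path.join(pkg_dir, "data", "lexicon.json")

with open(lexicon_path) as f:
    raw = f.read()

print(f"{lexicon_path}: {len(raw)} characters")
try:
    json.loads(raw)
    print("lexicon.json parses fine")
except json.JSONDecodeError as e:
    print(f"JSONDecodeError: {e}")
    # Show the text around the failing position (char 6 in the report above).
    print("context:", repr(raw[max(0, e.pos - 20):e.pos + 20]))
```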
piekvossen commented 2 years ago

In cltl-knowledgeextraction, API changes were made that broke this import. UtteranceHypothesis was needed to decide whether an utterance is a statement or a question and to select the further processing accordingly, but it was removed from cltl.triple_extraction.api. I have now removed it from the script as well, so the script works without this check and treats all utterances as statements.
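
The gist of that workaround looks roughly like the sketch below (hypothetical code, not the actual notebook change; the `chat`/`analyzer` names follow the old `cltl.triple_extraction.api` import in the traceback, and the current cltl-knowledgeextraction API may differ):

```python
# Hypothetical sketch of the workaround: with UtteranceHypothesis gone,
# there is no statement/question classification step, so every utterance
# is passed straight to triple extraction, i.e. treated as a statement.

def process_utterance(chat, analyzer, utterance_text: str):
    """Add `utterance_text` to `chat` and extract triples from it.

    `chat` and `analyzer` stand in for the Chat and Analyzer objects
    provided by cltl.triple_extraction; they are assumptions here.
    """
    chat.add_utterance(utterance_text)

    # Old flow: wrap the text in an UtteranceHypothesis, decide whether it
    # is a statement or a question, and branch on that.
    # New flow: skip the classification and analyze unconditionally.
    analyzer.analyze(chat.last_utterance)
    return chat.last_utterance.triples
```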