Closed · seangrant82 closed this issue 1 year ago
This looks like an issue with the spaCy installation. Please make sure you are using Python 3.8 and create a separate conda environment in which to install opennyai.
Fixed in the new release; the issue came from one of the dependencies. Support has now been added for Python 3.8, 3.9, and 3.10.
@seangrant82 Try creating a fresh environment and installing the latest version of the library. You can also test it on Colab.
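If it helps, here is a minimal smoke test (a sketch, not part of the library) to run in the fresh environment after something like `conda create -n opennyai python=3.8`, `conda activate opennyai`, and `pip install -U opennyai`. It only checks the interpreter version, the versions of the two dependencies that appear in the traceback, and that the top-level imports resolve:

```python
import sys

# The library currently supports Python 3.8, 3.9 and 3.10.
print("Python:", sys.version)
assert (3, 8) <= sys.version_info[:2] <= (3, 10), "use Python 3.8, 3.9 or 3.10"

# Versions of the two packages that appear in the traceback.
import pydantic
import spacy
print("spaCy:", spacy.__version__)
print("pydantic:", pydantic.VERSION)

# If these imports succeed, the original TypeError is gone.
from opennyai import Pipeline
from opennyai.utils import Data
print("opennyai imports OK")
```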
I followed all the steps from the main page and copy/pasted the code into a JupyterLab notebook, but it fails on the very first cell.
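For reference, this is the failing cell as reconstructed from the top frame of the traceback (the snippet on the main page may differ slightly):

```python
from opennyai import Pipeline
from opennyai.utils import Data
import urllib
```

Running it produces this error: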
```
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
Cell In[6], line 1
----> 1 from opennyai import Pipeline
      2 from opennyai.utils import Data
      3 import urllib

File ~\.conda\envs\legal_contract\lib\site-packages\opennyai\__init__.py:1
----> 1 from opennyai import ner, utils
      2 from opennyai.pipeline import Pipeline
      3 from opennyai.rhetorical_roles.rhetorical_roles import RhetoricalRolePredictor

File ~\.conda\envs\legal_contract\lib\site-packages\opennyai\ner\__init__.py:1
----> 1 from .ner_utils import load, get_json_from_spacy_doc, ner_displacy_option, get_csv, get_unique_precedent_count, \
      2     get_unique_provision_count, get_unique_statute_count

File ~\.conda\envs\legal_contract\lib\site-packages\opennyai\ner\ner_utils.py:6
      2 import copy
      4 import pandas as pd
----> 6 from opennyai.ner.InLegalNER.InLegalNER import InLegalNER
      9 def load(model_name: str = 'en_legal_ner_trf', use_gpu: bool = True):
     10     """Returns object of InLegalNER class.
     11     It is used for loading InLegalNER model in memory.
     12     Args:
    (...)
     17     documentation
     18     """

File ~\.conda\envs\legal_contract\lib\site-packages\opennyai\ner\InLegalNER\InLegalNER.py:3
      1 import copy
----> 3 import spacy
      4 from tqdm import tqdm
      5 from wasabi import msg

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\__init__.py:14
     11 from thinc.api import prefer_gpu, require_gpu, require_cpu  # noqa: F401
     12 from thinc.api import Config
---> 14 from . import pipeline  # noqa: F401
     15 from .cli.info import info  # noqa: F401
     16 from .glossary import explain  # noqa: F401

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\pipeline\__init__.py:1
----> 1 from .attributeruler import AttributeRuler
      2 from .dep_parser import DependencyParser
      3 from .entity_linker import EntityLinker

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\pipeline\attributeruler.py:6
      3 import srsly
      4 from pathlib import Path
----> 6 from .pipe import Pipe
      7 from ..errors import Errors
      8 from ..training import Example

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\pipeline\pipe.pyx:8, in init spacy.pipeline.pipe()

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\training\__init__.py:11
      9 from .batchers import minibatch_by_padded_size, minibatch_by_words  # noqa: F401
     10 from .loggers import console_logger  # noqa: F401
---> 11 from .callbacks import create_copy_from_base_model  # noqa: F401

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\training\callbacks.py:3
      1 from typing import Callable, Optional
      2 from ..errors import Errors
----> 3 from ..language import Language
      4 from ..util import load_model, registry, logger
      7 @registry.callbacks("spacy.copy_from_base_model.v1")
      8 def create_copy_from_base_model(
      9     tokenizer: Optional[str] = None,
     10     vocab: Optional[str] = None,
     11 ) -> Callable[[Language], Language]:

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\language.py:25
     23 from .pipe_analysis import validate_attrs, analyze_pipes, print_pipe_analysis
     24 from .training import Example, validate_examples
---> 25 from .training.initialize import init_vocab, init_tok2vec
     26 from .scorer import Scorer
     27 from .util import registry, SimpleFrozenList, _pipe, raise_error

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\training\initialize.py:14
     11 from itertools import islice
     12 import warnings
---> 14 from .pretrain import get_tok2vec_ref
     15 from ..lookups import Lookups
     16 from ..vectors import Vectors, Mode as VectorsMode

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\training\pretrain.py:16
     14 from ..errors import Errors
     15 from ..tokens import Doc
---> 16 from ..schemas import ConfigSchemaPretrain
     17 from ..util import registry, load_model_from_config, dot_to_object
     20 def pretrain(
     21     config: Config,
     22     output_dir: Path,
    (...)
     26     silent: bool = True,
     27 ):

File ~\.conda\envs\legal_contract\lib\site-packages\spacy\schemas.py:216
    210 UnderscoreValue = Union[
    211     TokenPatternString, TokenPatternNumber, str, int, float, list, bool
    212 ]
    213 IobValue = Literal["", "I", "O", "B", 0, 1, 2, 3]
--> 216 class TokenPattern(BaseModel):
    217     orth: Optional[StringValue] = None
    218     text: Optional[StringValue] = None

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\main.py:299, in pydantic.main.ModelMetaclass.__new__()

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\fields.py:411, in pydantic.fields.ModelField.infer()

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\fields.py:342, in pydantic.fields.ModelField.__init__()

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\fields.py:451, in pydantic.fields.ModelField.prepare()

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\fields.py:545, in pydantic.fields.ModelField._type_analysis()

File ~\.conda\envs\legal_contract\lib\site-packages\pydantic\fields.py:550, in pydantic.fields.ModelField._type_analysis()

File ~\.conda\envs\legal_contract\lib\typing.py:774, in _GenericAlias.__subclasscheck__(self, cls)
    772 if self._special:
    773     if not isinstance(cls, _GenericAlias):
--> 774         return issubclass(cls, self.__origin__)
    775 if cls._special:
    776     return issubclass(cls.__origin__, self.__origin__)

TypeError: issubclass() arg 1 must be a class
```