Closed · ttranslit closed this issue 4 months ago
Hello, I am having trouble loading the model for the first time. I am using version 1.2.28.
tsc = TargetSentimentClassifier()
It gives me the following error:
Cell In[27], line 1
----> 1 tsc = TargetSentimentClassifier()

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/NewsSentiment/infer.py:60, in TargetSentimentClassifier.__init__(self, opts_from_infer, single_targets, logging_level, state_dict)
     57 final_opts.training_mode = False
     59 # prepare and initialize instructor
---> 60 instructor = prepare_and_start_instructor(final_opts)
     62 # get stuff that we need from the instructor
     63 self.model = instructor.own_model

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/NewsSentiment/train.py:1211, in prepare_and_start_instructor(opt)
   1208 # set dataset_path to include experiment_path
   1209 opt.dataset_path = os.path.join(opt.experiment_path, opt.dataset_path)
-> 1211 ins = Instructor(opt)
   1213 if opt.training_mode:
   1214     opt.initializer = initializers[opt.initializer]

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/NewsSentiment/train.py:132, in Instructor.__init__(self, opt)
    129 logger.info("initialized transformer tokenizers and models")
    131 # setup own tokenizer
--> 132 self.own_tokenizer = FXEasyTokenizer(
    133     self.transformer_tokenizers,
    134     self.opt.max_seq_len,
    135     self.opt.knowledgesources,
    136     self.opt.is_use_natural_target_phrase_for_spc,
    137 )
    139 self.trainset = None
    140 self.devset = None

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/NewsSentiment/dataset.py:105, in FXEasyTokenizer.__init__(self, tokenizers_name_and_obj, max_seq_len, knowledge_sources, is_use_natural_target_phrase_for_spc)
     98 def __init__(
     99     self,
    100     tokenizers_name_and_obj: dict,
    (...)
    103     is_use_natural_target_phrase_for_spc: bool,
    104 ):
--> 105     self._get_labels()
    106     self.tokenizers_name_and_obj = tokenizers_name_and_obj
    107     self.max_seq_len = max_seq_len

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/NewsSentiment/dataset.py:116, in FXEasyTokenizer._get_labels(cls)
    114     return
    115 try:
--> 116     cls.NLP = spacy.load("en_core_web_sm")
    117 except OSError:
    118     spacy.cli.download("en_core_web_sm")

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/__init__.py:54, in load(name, vocab, disable, enable, exclude, config)
     30 def load(
     31     name: Union[str, Path],
     32     *,
    (...)
     37     config: Union[Dict[str, Any], Config] = util.SimpleFrozenDict(),
     38 ) -> Language:
     39     """Load a spaCy model from an installed package or a local path.
     40
     41     name (str): Package name or model path.
    (...)
     52     RETURNS (Language): The loaded nlp object.
     53     """
---> 54     return util.load_model(
     55         name,
     56         vocab=vocab,
     57         disable=disable,
     58         enable=enable,
     59         exclude=exclude,
     60         config=config,
     61     )

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:442, in load_model(name, vocab, disable, enable, exclude, config)
    440     return get_lang_class(name.replace("blank:", ""))()
    441 if is_package(name):  # installed as package
--> 442     return load_model_from_package(name, **kwargs)  # type: ignore[arg-type]
    443 if Path(name).exists():  # path to model data directory
    444     return load_model_from_path(Path(name), **kwargs)  # type: ignore[arg-type]

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:478, in load_model_from_package(name, vocab, disable, enable, exclude, config)
    461 """Load a model from an installed package.
    462
    463 name (str): The package name.
    (...)
    475 RETURNS (Language): The loaded nlp object.
    476 """
    477 cls = importlib.import_module(name)
--> 478 return cls.load(vocab=vocab, disable=disable, enable=enable, exclude=exclude, config=config)

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/en_core_web_sm/__init__.py:10, in load(**overrides)
      9 def load(**overrides):
---> 10     return load_model_from_init_py(__file__, **overrides)

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:659, in load_model_from_init_py(init_file, vocab, disable, enable, exclude, config)
    657 if not model_path.exists():
    658     raise IOError(Errors.E052.format(path=data_path))
--> 659 return load_model_from_path(
    660     data_path,
    661     vocab=vocab,
    662     meta=meta,
    663     disable=disable,
    664     enable=enable,
    665     exclude=exclude,
    666     config=config,
    667 )

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:516, in load_model_from_path(model_path, meta, vocab, disable, enable, exclude, config)
    514 overrides = dict_to_dot(config, for_overrides=True)
    515 config = load_config(config_path, overrides=overrides)
--> 516 nlp = load_model_from_config(
    517     config,
    518     vocab=vocab,
    519     disable=disable,
    520     enable=enable,
    521     exclude=exclude,
    522     meta=meta,
    523 )
    524 return nlp.from_disk(model_path, exclude=exclude, overrides=overrides)

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:564, in load_model_from_config(config, meta, vocab, disable, enable, exclude, auto_fill, validate)
    561 # This will automatically handle all codes registered via the languages
    562 # registry, including custom subclasses provided via entry points
    563 lang_cls = get_lang_class(nlp_config["lang"])
--> 564 nlp = lang_cls.from_config(
    565     config,
    566     vocab=vocab,
    567     disable=disable,
    568     enable=enable,
    569     exclude=exclude,
    570     auto_fill=auto_fill,
    571     validate=validate,
    572     meta=meta,
    573 )
    574 return nlp

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/language.py:1765, in Language.from_config(cls, config, vocab, disable, enable, exclude, meta, auto_fill, validate)
   1763     filled["pretraining"] = orig_pretraining
   1764     config["pretraining"] = orig_pretraining
-> 1765 resolved_nlp = registry.resolve(
   1766     filled["nlp"], validate=validate, schema=ConfigSchemaNlp
   1767 )
   1768 create_tokenizer = resolved_nlp["tokenizer"]
   1769 before_creation = resolved_nlp["before_creation"]

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/confection/__init__.py:759, in registry.resolve(cls, config, schema, overrides, validate)
    750 @classmethod
    751 def resolve(
    752     cls,
    (...)
    757     validate: bool = True,
    758 ) -> Dict[str, Any]:
--> 759     resolved, _ = cls._make(
    760         config, schema=schema, overrides=overrides, validate=validate, resolve=True
    761     )
    762     return resolved

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/confection/__init__.py:808, in registry._make(cls, config, schema, overrides, resolve, validate)
    806 if not is_interpolated:
    807     config = Config(orig_config).interpolate()
--> 808 filled, _, resolved = cls._fill(
    809     config, schema, validate=validate, overrides=overrides, resolve=resolve
    810 )
    811 filled = Config(filled, section_order=section_order)
    812 # Check that overrides didn't include invalid properties not in config

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/confection/__init__.py:862, in registry._fill(cls, config, schema, validate, resolve, parent, overrides)
    860     field = schema.__fields__[key]
    861     schema.__fields__[key] = copy_model_field(field, Any)
--> 862 promise_schema = cls.make_promise_schema(value, resolve=resolve)
    863 filled[key], validation[v_key], final[key] = cls._fill(
    864     value,
    865     promise_schema,
    (...)
    869     overrides=overrides,
    870 )
    871 reg_name, func_name = cls.get_constructor(final[key])

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/confection/__init__.py:1054, in registry.make_promise_schema(cls, obj, resolve)
   1052 if not resolve and not cls.has(reg_name, func_name):
   1053     return EmptySchema
-> 1054 func = cls.get(reg_name, func_name)
   1055 # Read the argument annotations and defaults from the function signature
   1056 id_keys = [k for k in obj.keys() if k.startswith("@")]

File /opt/anaconda3/envs/sent/lib/python3.11/site-packages/spacy/util.py:128, in registry.get(cls, registry_name, func_name)
    126 if not hasattr(cls, registry_name):
    127     names = ", ".join(cls.get_registry_names()) or "none"
--> 128     raise RegistryError(Errors.E892.format(name=registry_name, available=names))
    129 reg = getattr(cls, registry_name)
    130 try:

RegistryError: [E892] Unknown function registry: 'vectors'. Available names: architectures, augmenters, batchers, callbacks, cli, datasets, displacy_colors, factories, initializers, languages, layers, lemmatizers, loggers, lookups, losses, misc, models, ops, optimizers, readers, schedules, scorers, tokenizers
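In case it helps with debugging: an E892 error about an unknown 'vectors' registry usually points to a version mismatch between the installed spaCy and the en_core_web_sm package (the 'vectors' registry appears to exist only in newer spaCy releases, around 3.7+). Here is a minimal check, assuming that mismatch is the cause in this environment:

```python
# Sketch for diagnosing a suspected spaCy / en_core_web_sm version mismatch.
# Assumption: the installed en_core_web_sm build (e.g. 3.7.x) is newer than the
# spaCy version in this environment, so its config references a registry that
# the older spaCy does not know about.
import importlib.metadata

import spacy

print("spaCy version:", spacy.__version__)
print("en_core_web_sm version:", importlib.metadata.version("en_core_web_sm"))

# If the major/minor versions disagree, re-downloading the model with the
# currently installed spaCy should pull a compatible build:
# spacy.cli.download("en_core_web_sm")
```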
This should be fixed by now; if not, feel free to reopen.
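For anyone who hits this later: one quick way to verify is to upgrade the package and rerun the README example. The snippet below follows the usage shown in the NewsSentiment README; treat it as a sketch rather than the exact test the maintainers ran:

```python
# Run after `pip install --upgrade NewsSentiment`; constructing the classifier
# should no longer raise the spaCy RegistryError.
from NewsSentiment import TargetSentimentClassifier

tsc = TargetSentimentClassifier()

# README-style usage: left context, target phrase, right context.
sentiment = tsc.infer_from_text("I like ", "Peter", " but I don't like Robert.")
print(sentiment[0])
```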