allenai / scibert

A BERT model for scientific text.
https://arxiv.org/abs/1903.10676
Apache License 2.0

Error: relation extraction on the ChemProt dataset in PyTorch #89

Open ghost opened 4 years ago

ghost commented 4 years ago

```
Traceback (most recent call last):
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/common/util.py", line 289, in get_spacy_model
    spacy_model = spacy.load(spacy_model_name, disable=disable)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/spacy/__init__.py", line 27, in load
    return util.load_model(name, **overrides)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/spacy/util.py", line 139, in load_model
    raise IOError(Errors.E050.format(name=name))
OSError: [E050] Can't find model 'en_core_web_sm'. It doesn't seem to be a shortcut link, a Python package or a valid path to a data directory.
```

During handling of the above exception, another exception occurred:

```
Traceback (most recent call last):
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connection.py", line 160, in _new_conn
    (self._dns_host, self.port), self.timeout, **extra_kw
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/util/connection.py", line 84, in create_connection
    raise err
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/util/connection.py", line 74, in create_connection
    sock.connect(sa)
ConnectionRefusedError: [Errno 111] Connection refused
```

During handling of the above exception, another exception occurred:

```
Traceback (most recent call last):
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connectionpool.py", line 677, in urlopen
    chunked=chunked,
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connectionpool.py", line 381, in _make_request
    self._validate_conn(conn)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connectionpool.py", line 976, in _validate_conn
    conn.connect()
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connection.py", line 308, in connect
    conn = self._new_conn()
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connection.py", line 172, in _new_conn
    self, "Failed to establish a new connection: %s" % e
urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPSConnection object at 0x7f2c799760b8>: Failed to establish a new connection: [Errno 111] Connection refused
```

During handling of the above exception, another exception occurred:

```
Traceback (most recent call last):
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
    timeout=timeout
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/connectionpool.py", line 725, in urlopen
    method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/urllib3/util/retry.py", line 439, in increment
    raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Max retries exceeded with url: /explosion/spacy-models/master/shortcuts-v2.json (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x7f2c799760b8>: Failed to establish a new connection: [Errno 111] Connection refused',))
```

During handling of the above exception, another exception occurred:

```
Traceback (most recent call last):
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/runpy.py", line 193, in _run_module_as_main
    "__main__", mod_spec)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/runpy.py", line 85, in _run_code
    exec(code, run_globals)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/run.py", line 21, in <module>
    run()
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/run.py", line 18, in run
    main(prog="allennlp")
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/commands/__init__.py", line 102, in main
    args.func(args)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/commands/train.py", line 124, in train_model_from_args
    args.cache_prefix)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/commands/train.py", line 168, in train_model_from_file
    cache_directory, cache_prefix)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/commands/train.py", line 226, in train_model
    cache_prefix)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/training/trainer_pieces.py", line 41, in from_params
    all_datasets = training_util.datasets_from_params(params, cache_directory, cache_prefix)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/training/util.py", line 165, in datasets_from_params
    dataset_reader = DatasetReader.from_params(dataset_reader_params)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/common/from_params.py", line 365, in from_params
    return subclass.from_params(params=params, **extras)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/common/from_params.py", line 388, in from_params
    return cls(**kwargs)  # type: ignore
  File "/home/hui2019/scibert-master/scibert-master/scibert/dataset_readers/classification_dataset_reader.py", line 33, in __init__
    self._tokenizer = tokenizer or WordTokenizer()
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/data/tokenizers/word_tokenizer.py", line 44, in __init__
    self._word_splitter = word_splitter or SpacyWordSplitter()
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/data/tokenizers/word_splitter.py", line 173, in __init__
    self.spacy = get_spacy_model(language, pos_tags, parse, ner)
  File "/home/hui2019/scibert-master/scibert-master/src/allennlp/allennlp/common/util.py", line 292, in get_spacy_model
    spacy_download(spacy_model_name)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/spacy/cli/download.py", line 38, in download
    shortcuts = get_json(about.__shortcuts__, "available shortcuts")
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/spacy/cli/download.py", line 84, in get_json
    r = requests.get(url)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/api.py", line 76, in get
    return request('get', url, params=params, **kwargs)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/api.py", line 61, in request
    return session.request(method=method, url=url, **kwargs)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/sessions.py", line 530, in request
    resp = self.send(prep, **send_kwargs)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/sessions.py", line 643, in send
    r = adapter.send(request, **kwargs)
  File "/home/hui2019/anaconda3/envs/py36_pytorch1.3.1/lib/python3.6/site-packages/requests/adapters.py", line 516, in send
    raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Max retries exceeded with url: /explosion/spacy-models/master/shortcuts-v2.json (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x7f2c799760b8>: Failed to establish a new connection: [Errno 111] Connection refused',))
```

I hope you can help me, thanks.
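If the training box itself has no internet access (which the Connection refused errors suggest), a common workaround is to fetch the model archive from the spacy-models releases page on another machine, copy it over, and pip-install it locally; once the package is installed, spacy.load("en_core_web_sm") resolves it by package name without any network call. The version number below is illustrative and should be matched to the installed spaCy 2.x. The snippet then exercises the same code path that failed in the traceback (WordTokenizer, which defaults to SpacyWordSplitter) as a sanity check before re-running the ChemProt training command:

```python
# Offline workaround sketch. On a machine WITH internet, download an archive such as
#   https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-2.0.0/en_core_web_sm-2.0.0.tar.gz
# (the 2.0.0 version is illustrative), copy it to this machine, and install it with
#   pip install /path/to/en_core_web_sm-2.0.0.tar.gz
# Then verify the tokenizer path that the ChemProt dataset reader takes:
from allennlp.data.tokenizers.word_tokenizer import WordTokenizer

tokenizer = WordTokenizer()   # defaults to SpacyWordSplitter -> get_spacy_model("en_core_web_sm", ...)
tokens = tokenizer.tokenize("Imatinib inhibits the BCR-ABL kinase.")
print([t.text for t in tokens])   # if this prints tokens, the spaCy model is found locally
```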