Since I am unauthorized for the URL https://huggingface.co/decapoda-research/llama-7b-h/resolve/main/config.json
loading llm model decapoda-research/llama-7b-h
/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/huggingface_hub/file_download.py:1132: FutureWarning: resume_download is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use force_download=True.
warnings.warn(
Traceback (most recent call last):
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/huggingface_hub/utils/_errors.py", line 304, in hf_raise_for_status
response.raise_for_status()
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/requests/models.py", line 1021, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 401 Client Error: Unauthorized for url: https://huggingface.co/decapoda-research/llama-7b-h/resolve/main/config.json
So I used baffo32/decapoda-research-llama-7B-hf instead, but there are errors:
pruning starts
loading calibdation data
Traceback (most recent call last):
File "/home/delight-gpu/Workspace2/azuryl/FLAP/main.py", line 109, in
main()
File "/home/delight-gpu/Workspace2/azuryl/FLAP/main.py", line 82, in main
prune_flap(args, model, tokenizer, device)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/prune.py", line 294, in pruneflap
dataloader, = get_loaders("wikitext2", nsamples=args.nsamples,seed=args.seed,seqlen=model.seqlen,tokenizer=tokenizer)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/data.py", line 159, in get_loaders
return get_wikitext2(nsamples, seed, seqlen, tokenizer)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/data.py", line 79, in get_wikitext2
traindata = load_dataset('wikitext', 'wikitext-2-raw-v1', split='train')
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1767, in load_dataset
builder_instance = load_dataset_builder(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1498, in load_dataset_builder
dataset_module = dataset_module_factory(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1215, in dataset_module_factory
raise e1 from None
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1192, in dataset_module_factory
return HubDatasetModuleFactoryWithoutScript(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 765, in get_module
else get_data_patterns_in_dataset_repository(hfh_dataset_info, self.data_dir)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 675, in get_data_patterns_in_dataset_repository
return _get_data_files_patterns(resolver)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 236, in _get_data_files_patterns
data_files = pattern_resolver(pattern)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 486, in _resolve_single_pattern_in_dataset_repository
glob_iter = [PurePath(filepath) for filepath in fs.glob(PurePath(pattern).as_posix()) if fs.isfile(filepath)]
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/fsspec/spec.py", line 606, in glob
pattern = glob_translate(path + ("/" if ends_with_sep else ""))
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/fsspec/utils.py", line 734, in glob_translate
raise ValueError(
ValueError: Invalid pattern: '**' can only be an entire path component
@an-yongqi Dear An, congratulations on the great work!
Since I am Unauthorized for url: https://huggingface.co/decapoda-research/llama-7b-h/resolve/main/config.json loading llm model decapoda-research/llama-7b-h /home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/huggingface_hub/file_download.py:1132: FutureWarning:
resume_download
is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, useforce_download=True
. warnings.warn( Traceback (most recent call last): File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/huggingface_hub/utils/_errors.py", line 304, in hf_raise_for_status response.raise_for_status() File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/requests/models.py", line 1021, in raise_for_status raise HTTPError(http_error_msg, response=self) requests.exceptions.HTTPError: 401 Client Error: Unauthorized for url: https://huggingface.co/decapoda-research/llama-7b-h/resolve/main/config.jsonSo I use baffo32/decapoda-research-llama-7B-hf but there are errors
pruning starts loading calibdation data Traceback (most recent call last): File "/home/delight-gpu/Workspace2/azuryl/FLAP/main.py", line 109, in
main()
File "/home/delight-gpu/Workspace2/azuryl/FLAP/main.py", line 82, in main
prune_flap(args, model, tokenizer, device)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/prune.py", line 294, in pruneflap
dataloader, = get_loaders("wikitext2", nsamples=args.nsamples,seed=args.seed,seqlen=model.seqlen,tokenizer=tokenizer)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/data.py", line 159, in get_loaders
return get_wikitext2(nsamples, seed, seqlen, tokenizer)
File "/home/delight-gpu/Workspace2/azuryl/FLAP/lib/data.py", line 79, in get_wikitext2
traindata = load_dataset('wikitext', 'wikitext-2-raw-v1', split='train')
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1767, in load_dataset
builder_instance = load_dataset_builder(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1498, in load_dataset_builder
dataset_module = dataset_module_factory(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1215, in dataset_module_factory
raise e1 from None
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 1192, in dataset_module_factory
return HubDatasetModuleFactoryWithoutScript(
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/load.py", line 765, in get_module
else get_data_patterns_in_dataset_repository(hfh_dataset_info, self.data_dir)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 675, in get_data_patterns_in_dataset_repository
return _get_data_files_patterns(resolver)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 236, in _get_data_files_patterns
data_files = pattern_resolver(pattern)
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/datasets/data_files.py", line 486, in _resolve_single_pattern_in_dataset_repository
glob_iter = [PurePath(filepath) for filepath in fs.glob(PurePath(pattern).as_posix()) if fs.isfile(filepath)]
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/fsspec/spec.py", line 606, in glob
pattern = glob_translate(path + ("/" if ends_with_sep else ""))
File "/home/azuryl/anaconda3/envs/flap/lib/python3.9/site-packages/fsspec/utils.py", line 734, in glob_translate
raise ValueError(
ValueError: Invalid pattern: '**' can only be an entire path component
Can you help me?
Thank you.