File "/home/z/tofu/finetune.py", line 96, in main
model = AutoModelForCausalLM.from_pretrained(model_id, use_flash_attention_2=model_cfg["flash_attention2"]=="true", torch_dtype=torch.bfloat16, trust_remote_code = True)
File "/home/z/miniconda3/envs/tofu/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py", line 523, in from_pretrained
config, kwargs = AutoConfig.from_pretrained(
File "/home/z/miniconda3/envs/tofu/lib/python3.9/site-packages/transformers/models/auto/configuration_auto.py", line 1147, in from_pretrained
config_class = get_class_from_dynamic_module(
File "/home/z/miniconda3/envs/tofu/lib/python3.9/site-packages/transformers/dynamic_module_utils.py", line 489, in get_class_from_dynamic_module
final_module = get_cached_module_file(
File "/home/z/miniconda3/envs/tofu/lib/python3.9/site-packages/transformers/dynamic_module_utils.py", line 294, in get_cached_module_file
resolved_module_file = cached_file(
File "/home/z/miniconda3/envs/tofu/lib/python3.9/site-packages/transformers/utils/hub.py", line 452, in cached_file
raise EnvironmentError(
OSError: microsoft/phi-1_5 does not appear to have a file named configuration_phi.py. Checkout 'https://huggingface.co/microsoft/phi-1_5/main' for available files.
I think this change, https://huggingface.co/microsoft/phi-1_5/commit/db561377f8306816df44b57e60a8c0bc4ac67e95, is what breaks the finetuning command: with trust_remote_code=True, the loader tries to fetch configuration_phi.py from the model repo, and that file is no longer there on main. Is that right?
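In case it is useful while this gets fixed, here is a minimal sketch of a possible workaround, assuming the problem is that the custom code files were removed from the model repo: pin from_pretrained to a revision of microsoft/phi-1_5 from before that commit. The revision string below is a placeholder, not a real commit id, and I dropped the flash-attention flag from the repo's call to keep the snippet self-contained:

```python
import torch
from transformers import AutoModelForCausalLM

# Placeholder: fill in a microsoft/phi-1_5 commit hash from before the change
# linked above (see the repo's commit history on the Hub).
PRE_CHANGE_REVISION = "<commit hash before db56137>"

model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-1_5",
    revision=PRE_CHANGE_REVISION,  # pin the download to the old repo state
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,        # same as in the original finetune.py call
)
```

revision accepts a branch name, tag, or commit hash, so any pre-change commit from the model repo's history should work; this is only a stopgap, not a proper fix for the repo.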