ValueError: When passing device_map as a string, the value needs to be a device name (e.g. cpu, cuda:0) or 'auto', 'balanced', 'balanced_low_0', 'sequential' but found . #33
```
Traceback (most recent call last):
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/main.py", line 199, in cli_evaluate
results, samples = cli_evaluate_single(args)
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/main.py", line 283, in cli_evaluate_single
results = evaluator.simple_evaluate(
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/utils.py", line 448, in _wrapper
return fn(*args, kwargs)
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/evaluator.py", line 93, in simple_evaluate
lm = lmms_eval.api.registry.get_model(model).create_from_arg_string(
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/api/model.py", line 92, in create_from_arg_string
return cls(*args, **args2)
File "/home/hasan/LLaVA/lmms-eval/lmms_eval/models/llava.py", line 75, in __init__
self._tokenizer, self._model, self._image_processor, self._max_length = load_pretrained_model(pretrained, None, get_model_name_from_path(pretrained), device_map=self.device_map, use_flash_attention_2=use_flash_attention_2)
File "/home/hasan/LLaVA/llava/model/builder.py", line 117, in load_pretrained_model
model = LlavaLlamaForCausalLM.from_pretrained(
File "/home/hasan/miniconda3/envs/llava/lib/python3.10/site-packages/transformers/modeling_utils.py", line 2972, in from_pretrained
raise ValueError(
ValueError: When passing device_map as a string, the value needs to be a device name (e.g. cpu, cuda:0) or 'auto', 'balanced', 'balanced_low_0', 'sequential' but found .
I am trying to replicate this evaluation task following this script: https://github.com/EvolvingLMMs-Lab/lmms-eval/blob/main/miscs/repr_scripts.sh
I have one Nvidia P40 GPU (CUDA 12.4)
This is the error I am facing:
Traceback (most recent call last): File "/home/hasan/LLaVA/lmms-eval/lmms_eval/main.py", line 199, in cli_evaluate results, samples = cli_evaluate_single(args) File "/home/hasan/LLaVA/lmms-eval/lmms_eval/main.py", line 283, in cli_evaluate_single results = evaluator.simple_evaluate( File "/home/hasan/LLaVA/lmms-eval/lmms_eval/utils.py", line 448, in _wrapper return fn(*args, **kwargs) File "/home/hasan/LLaVA/lmms-eval/lmms_eval/evaluator.py", line 93, in simple_evaluate lm = lmms_eval.api.registry.get_model(model).create_from_arg_string( File "/home/hasan/LLaVA/lmms-eval/lmms_eval/api/model.py", line 92, in create_from_arg_string return cls(*args, **args2) File "/home/hasan/LLaVA/lmms-eval/lmms_eval/models/llava.py", line 75, in __init__ self._tokenizer, self._model, self._image_processor, self._max_length = load_pretrained_model(pretrained, None, get_model_name_from_path(pretrained), device_map=self.device_map, use_flash_attention_2=use_flash_attention_2) File "/home/hasan/LLaVA/llava/model/builder.py", line 117, in load_pretrained_model model = LlavaLlamaForCausalLM.from_pretrained( File "/home/hasan/miniconda3/envs/llava/lib/python3.10/site-packages/transformers/modeling_utils.py", line 2972, in from_pretrained raise ValueError( ValueError: When passing device_map as a string, the value needs to be a device name (e.g. cpu, cuda:0) or 'auto', 'balanced', 'balanced_low_0', 'sequential' but found .
```