Environment: Linux
GPU: RTX 3090
```text
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Traceback (most recent call last):
  File "/Baichuan2/tune_demo.py", line 311, in
    model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto",
  File "/opt/conda/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py", line 560, in from_pretrained
    return model_class.from_pretrained(
  File "/root/.cache/huggingface/modules/transformers_modules/baichuan2_torchkeras/modeling_baichuan.py", line 670, in from_pretrained
    return super(BaichuanForCausalLM, cls).from_pretrained(pretrained_model_name_or_path, *model_args,
  File "/opt/conda/lib/python3.9/site-packages/transformers/modeling_utils.py", line 3307, in from_pretrained
    ) = cls._load_pretrained_model(
  File "/opt/conda/lib/python3.9/site-packages/transformers/modeling_utils.py", line 3695, in _load_pretrained_model
    new_error_msgs, offload_index, state_dict_index = _load_state_dict_into_meta_model(
  File "/opt/conda/lib/python3.9/site-packages/transformers/modeling_utils.py", line 741, in _load_state_dict_into_meta_model
    set_module_tensor_to_device(model, param_name, param_device, **set_module_kwargs)
  File "/opt/conda/lib/python3.9/site-packages/accelerate/utils/modeling.py", line 313, in set_module_tensor_to_device
    new_value = value.to(device)
NotImplementedError: Cannot copy out of meta tensor; no data!
```
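For context, here is a minimal sketch of the loading call that produces the traceback above. The call in `tune_demo.py` is truncated in the traceback, so the checkpoint path, `torch_dtype`, and `trust_remote_code` arguments below are assumptions, not the original code; only `device_map="auto"` (which routes weight loading through accelerate's meta-device path, where the error is raised) is visible in the trace.

```python
# Minimal repro sketch (assumptions marked); not the original tune_demo.py code.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "baichuan-inc/Baichuan2-7B-Chat"  # assumed checkpoint path

tokenizer = AutoTokenizer.from_pretrained(
    model_name_or_path,
    trust_remote_code=True,      # Baichuan2 ships a custom modeling_baichuan.py
)
model = AutoModelForCausalLM.from_pretrained(
    model_name_or_path,
    device_map="auto",           # triggers accelerate's meta-tensor loading path
    torch_dtype=torch.float16,   # assumed; not visible in the truncated call
    trust_remote_code=True,      # assumed, required for the remote-code model class
)
# Fails during weight dispatch with:
# NotImplementedError: Cannot copy out of meta tensor; no data!
```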