Text Generation
Transformers
Safetensors
llada2_moe
conversational
custom_code

fun

#2
by malv-c - opened

File "/2/llm/m/DD/dm", line 2, in <module>
model = AutoModelForCausalLM.from_pretrained("Zigeng/DMax-Math-16B", trust_remote_code=True, dtype="auto")
File "/home/void/.pyenv/versions/3.13.9/lib/python3.13/site-packages/transformers/models/auto/auto_factory.py", line 356, in from_pretrained
model_class = get_class_from_dynamic_module(
class_ref, pretrained_model_name_or_path, code_revision=code_revision, **hub_kwargs, **kwargs
)
File "/home/void/.pyenv/versions/3.13.9/lib/python3.13/site-packages/transformers/dynamic_module_utils.py", line 583, in get_class_from_dynamic_module
return get_class_in_module(class_name, final_module, force_reload=force_download)
File "/home/void/.pyenv/versions/3.13.9/lib/python3.13/site-packages/transformers/dynamic_module_utils.py", line 309, in get_class_in_module
module_spec.loader.exec_module(module)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^
File "<frozen importlib._bootstrap_external>", line 1027, in exec_module
File "<frozen importlib._bootstrap>", line 488, in _call_with_frames_removed
File "/home/void/.cache/huggingface/modules/transformers_modules/Zigeng/DMax_hyphen_Math_hyphen_16B/75d957038b906c71ec8e8b12a257be471df8b10e/modeling_llada2_moe.py", line 53, in <module>
from transformers.utils.import_utils import is_torch_fx_available
ImportError: cannot import name 'is_torch_fx_available' from 'transformers.utils.import_utils' (/home/void/.pyenv/versions/3.13.9/lib/python3.13/site-packages/transformers/utils/import_utils.py)
3.13.9.pyenv:/2/llm/m/DD
%

Sign up or log in to comment