runtime error

Exit code: 1. Reason:
  File ".../site-packages/transformers/models/auto/auto_factory.py", line 380, in from_pretrained
    return model_class.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/modeling_utils.py", line 4166, in from_pretrained
    model = cls(config, *model_args, **model_kwargs)
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 1163, in __init__
    self.model = Phi3Model(config)
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 1004, in __init__
    [Phi3DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 1004, in <listcomp>
    [Phi3DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 796, in __init__
    self.self_attn = PHI3_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx)
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 286, in __init__
    self._init_rope()
  File "/home/user/.cache/huggingface/modules/transformers_modules/microsoft/phi_hyphen_3_hyphen_mini_hyphen_4k_hyphen_instruct/f39ac1d28e925b323eae81227eaba4464caced4e/modeling_phi3.py", line 296, in _init_rope
    scaling_type = self.config.rope_scaling["type"]
KeyError: 'type'

Download complete: 100%|██████████| 7.64G/7.64G [00:05<00:00, 1.33GB/s]

Container logs:

Fetching error logs...