NotImplementedError: Cannot copy out of meta tensor; no data!
decoder_model = AutoModelForCausalLM.from_pretrained(
"apple/OpenELM-270M",
torch_dtype=torch.float32,
trust_remote_code=True,
)
leads to the following traceback:
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 808, in __init__
self.transformer = OpenELMModel(config)
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 548, in __init__
self.layers = nn.ModuleList(
File "/scratch/users/kalvinchang/conda/envs/flow-slm/lib/python3.10/site-packages/torch/nn/modules/container.py", line 364, in __init__
self += modules
File "/scratch/users/kalvinchang/conda/envs/flow-slm/lib/python3.10/site-packages/torch/nn/modules/container.py", line 413, in __iadd__
return self.extend(modules)
File "/scratch/users/kalvinchang/conda/envs/flow-slm/lib/python3.10/site-packages/torch/nn/modules/container.py", line 498, in extend
for i, module in enumerate(modules):
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 549, in &lt;listcomp&gt;
OpenELMDecoderLayer(config=config, layer_idx=layer_idx)
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 468, in __init__
self.attn = OpenELMMultiHeadCausalAttention(config=config, layer_idx=layer_idx)
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 278, in __init__
self.pos_embedding = OpenELMRotaryEmbedding(
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 146, in __init__
self._compute_sin_cos_embeddings(max_seq_length)
File "/tmp/.xdg_cache_kalvinchang/huggingface/modules/transformers_modules/apple/OpenELM_hyphen_270M/e2b9003235d55a404567faf06e74883081c65e65/modeling_openelm.py", line 197, in _compute_sin_cos_embeddings
cos_emb = emb.cos().to(dtype=key_dtype, device=key_device)
File "/scratch/users/kalvinchang/conda/envs/flow-slm/lib/python3.10/site-packages/torch/utils/_device.py", line 109, in __torch_function__
return func(*args, **kwargs)
NotImplementedError: Cannot copy out of meta tensor; no data!
The workaround (as found by https://github.com/Cicicai379) is to comment out the call to `self._compute_sin_cos_embeddings(max_seq_length)` in `OpenELMRotaryEmbedding.__init__` (modeling_openelm.py, line 146). The error occurs because the sin/cos tables are materialized with `.to(dtype=..., device=...)` while the module is still being constructed on the meta device, which has no underlying data to copy; skipping the precomputation at init time avoids that copy (presumably the embeddings are then computed on first use — verify against the model's forward pass).