rkazants committed
Commit 490ee25 · verified · 1 Parent(s): 61f995d

Upload 6 files

config.json CHANGED
@@ -1,120 +1,64 @@
- {
- "adapter_rank": 128,
- "add_bias_linear": false,
- "architectures": [
- "Zamba2ForCausalLM"
- ],
- "attention_dropout": 0.0,
- "attention_head_dim": 1,
- "attention_hidden_size": 32,
- "bos_token_id": 1,
- "chunk_size": 256,
- "conv_kernel": 3,
- "d_model": 16,
- "d_state": 32,
- "eos_token_id": 2,
- "expand": 2,
- "hidden_act": "gelu",
- "hidden_size": 16,
- "hybrid_layer_ids": [
- 6,
- 12,
- 18,
- 24,
- 30,
- 36,
- 42,
- 47,
- 51
- ],
- "initializer_range": 0.02,
- "intermediate_size": 64,
- "kv_channels": 0,
- "layers_block_type": [
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba",
- "mamba",
- "hybrid",
- "mamba",
- "mamba"
- ],
- "mamba_d_conv": 4,
- "mamba_d_state": 64,
- "mamba_expand": 2,
- "mamba_headdim": 4,
- "mamba_ngroups": 1,
- "max_position_embeddings": 4096,
- "model_type": "zamba2",
- "n_layer": 46,
- "n_mamba_heads": 8,
- "num_attention_heads": 32,
- "num_hidden_layers": 54,
- "num_key_value_heads": 32,
- "num_logits_to_keep": 1,
- "num_mem_blocks": 1,
- "num_query_groups": 32,
- "pad_token_id": 0,
- "rms_norm_eps": 1e-05,
- "rope_theta": 10000,
- "time_step_floor": 0.0001,
- "time_step_limit": null,
- "time_step_max": 0.1,
- "time_step_min": 0.001,
- "torch_dtype": "float32",
- "transformers_version": "4.49.0",
- "use_cache": true,
- "use_conv_bias": true,
- "use_long_context": false,
- "use_mem_eff_path": false,
- "use_mem_rope": false,
- "use_shared_attention_adapter": false,
- "vocab_size": 50280
- }
+ {
+ "adapter_rank": 128,
+ "add_bias_linear": false,
+ "architectures": [
+ "Zamba2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "attention_head_dim": 1,
+ "attention_hidden_size": 32,
+ "bos_token_id": 1,
+ "chunk_size": 256,
+ "conv_kernel": 3,
+ "d_model": 16,
+ "d_state": 32,
+ "eos_token_id": 2,
+ "expand": 2,
+ "hidden_act": "gelu",
+ "hidden_size": 16,
+ "hybrid_layer_ids": [
+ 2,
+ 4
+ ],
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "kv_channels": 0,
+ "layers_block_type": [
+ "mamba",
+ "mamba",
+ "hybrid",
+ "mamba",
+ "hybrid",
+ "mamba"
+ ],
+ "mamba_d_conv": 4,
+ "mamba_d_state": 64,
+ "mamba_expand": 2,
+ "mamba_headdim": 4,
+ "mamba_ngroups": 1,
+ "max_position_embeddings": 4096,
+ "model_type": "zamba2",
+ "n_mamba_heads": 8,
+ "num_attention_heads": 32,
+ "num_hidden_layers": 6,
+ "num_key_value_heads": 32,
+ "num_logits_to_keep": 1,
+ "num_mem_blocks": 1,
+ "num_query_groups": 32,
+ "pad_token_id": 0,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000,
+ "time_step_floor": 0.0001,
+ "time_step_limit": null,
+ "time_step_max": 0.1,
+ "time_step_min": 0.001,
+ "torch_dtype": "float32",
+ "transformers_version": "4.55.4",
+ "use_cache": true,
+ "use_conv_bias": true,
+ "use_long_context": false,
+ "use_mem_eff_path": false,
+ "use_mem_rope": false,
+ "use_shared_attention_adapter": false,
+ "vocab_size": 50280
+ }
 
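The updated config.json above defines a much smaller Zamba2 stack: 6 hidden layers with hybrid blocks at layers 2 and 4, instead of 54 layers. A minimal sanity-check sketch, assuming a local clone of this repository as the working directory and a transformers release with Zamba2 support (the file itself records 4.55.4):

import json

from transformers import AutoConfig, AutoModelForCausalLM

# values changed in this commit, read straight from the uploaded file
with open("config.json") as f:
    cfg = json.load(f)
assert cfg["num_hidden_layers"] == 6       # was 54 before this commit
assert cfg["hybrid_layer_ids"] == [2, 4]   # was [6, 12, ..., 51]

# building the model from the config alone gives a randomly initialized
# tiny network, enough to check that the configuration is consistent
config = AutoConfig.from_pretrained(".")
model = AutoModelForCausalLM.from_config(config)
print(sum(p.numel() for p in model.parameters()))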
generation_config.json CHANGED
@@ -1,7 +1,7 @@
- {
- "_from_model_config": true,
- "bos_token_id": 1,
- "eos_token_id": 2,
- "pad_token_id": 0,
- "transformers_version": "4.49.0"
- }
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 0,
+ "transformers_version": "4.55.4"
+ }
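
The generation_config.json update only bumps transformers_version; the special-token IDs stay aligned with config.json. A small check, sketched under the assumption of a local clone as the working directory:

import json

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(".")  # reads generation_config.json
with open("config.json") as f:
    cfg = json.load(f)

for key in ("bos_token_id", "eos_token_id", "pad_token_id"):
    assert getattr(gen, key) == cfg[key], key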
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ba10694d9b76fc1f9948b2a6c29042a4a0b9e2a5434fd289b4a284d218fb3c37
- size 5100534
+ oid sha256:14b8d61dc17d817be575ac4c822f73555f99ac7aa40b0e13ee03e3dd2314d470
+ size 3528537
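
pytorch_model.bin is stored through Git LFS, so the diff above only shows the pointer file: the new checkpoint is 3,528,537 bytes with the listed SHA-256. A sketch of verifying a downloaded copy against that pointer, assuming the real weights have been fetched (e.g. with git lfs pull):

import hashlib

EXPECTED_SHA256 = "14b8d61dc17d817be575ac4c822f73555f99ac7aa40b0e13ee03e3dd2314d470"
EXPECTED_SIZE = 3528537

with open("pytorch_model.bin", "rb") as f:
    blob = f.read()

assert len(blob) == EXPECTED_SIZE, len(blob)
assert hashlib.sha256(blob).hexdigest() == EXPECTED_SHA256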
special_tokens_map.json CHANGED
@@ -1,23 +1,23 @@
- {
- "bos_token": {
- "content": "<s>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "</s>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "unk_token": {
- "content": "<unk>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
- }
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
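
special_tokens_map.json is re-uploaded with unchanged content: <s>, </s> and <unk> as BOS, EOS and UNK. These strings resolve to IDs 1, 2 and 0 via the added_tokens_decoder in tokenizer_config.json below; a small cross-check sketch, assuming a local clone:

import json

with open("special_tokens_map.json") as f:
    special = json.load(f)
with open("tokenizer_config.json") as f:
    tok_cfg = json.load(f)

# added_tokens_decoder maps token ID -> token definition (0: <unk>, 1: <s>, 2: </s>)
ids = {entry["content"]: int(token_id)
       for token_id, entry in tok_cfg["added_tokens_decoder"].items()}
assert ids[special["bos_token"]["content"]] == 1
assert ids[special["eos_token"]["content"]] == 2
assert ids[special["unk_token"]["content"]] == 0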
tokenizer_config.json CHANGED
@@ -1,44 +1,44 @@
- {
- "add_bos_token": true,
- "add_eos_token": false,
- "add_prefix_space": null,
- "added_tokens_decoder": {
- "0": {
- "content": "<unk>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "1": {
- "content": "<s>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "2": {
- "content": "</s>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- }
- },
- "additional_special_tokens": [],
- "bos_token": "<s>",
- "clean_up_tokenization_spaces": false,
- "eos_token": "</s>",
- "extra_special_tokens": {},
- "legacy": true,
- "model_max_length": 1000000000000000019884624838656,
- "pad_token": null,
- "sp_model_kwargs": {},
- "spaces_between_special_tokens": false,
- "tokenizer_class": "LlamaTokenizerFast",
- "unk_token": "<unk>",
- "use_default_system_prompt": false
- }
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "extra_special_tokens": {},
+ "legacy": true,
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": null,
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizerFast",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+ }
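
tokenizer_config.json is likewise unchanged in content: a LlamaTokenizerFast with add_bos_token enabled and add_eos_token disabled. A minimal usage sketch, assuming a local clone that also contains the tokenizer model files this config refers to:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")   # uses tokenizer_config.json plus the tokenizer files
ids = tok("hello world").input_ids
assert ids[0] == tok.bos_token_id == 1     # add_bos_token: true prepends <s>
assert ids[-1] != tok.eos_token_id         # add_eos_token: false, so no trailing </s>
assert tok.unk_token_id == 0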