introvoyz041 committed on
Commit 6ef725e · verified · 1 Parent(s): 0bf0caf

Upload config.json with huggingface_hub
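
For context, a commit like this one is typically created programmatically. Below is a minimal sketch of the upload call, assuming the huggingface_hub Python client; the repo id is a placeholder, since the target repository is not shown on this page.

from huggingface_hub import HfApi

api = HfApi()  # expects a token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="config.json",        # local file to push
    path_in_repo="config.json",           # destination path inside the repo
    repo_id="introvoyz041/<model-repo>",  # placeholder: actual repo id not shown on this page
    commit_message="Upload config.json with huggingface_hub",
)

Each such call produces one commit on the Hub, which is what generated the single-file diff below.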

Files changed (1)
  1. config.json +246 -0
config.json ADDED
@@ -0,0 +1,246 @@
+{
+  "architectures": [
+    "Lfm2MoeForCausalLM"
+  ],
+  "auto_map": {
+    "AutoConfig": "configuration_lfm2_moe.Lfm2MoeConfig",
+    "AutoModelForCausalLM": "modeling_lfm2_moe.Lfm2MoeForCausalLM"
+  },
+  "bos_token_id": 1,
+  "conv_L_cache": 3,
+  "conv_bias": false,
+  "dtype": "bfloat16",
+  "eos_token_id": 7,
+  "hidden_size": 2048,
+  "intermediate_size": 7168,
+  "layer_types": [
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv",
+    "full_attention",
+    "conv",
+    "conv"
+  ],
+  "max_position_embeddings": 128000,
+  "model_type": "lfm2_moe",
+  "moe_intermediate_size": 1792,
+  "norm_eps": 1e-05,
+  "norm_topk_prob": true,
+  "num_attention_heads": 32,
+  "num_dense_layers": 2,
+  "num_experts": 32,
+  "num_experts_per_tok": 4,
+  "num_hidden_layers": 24,
+  "num_key_value_heads": 8,
+  "pad_token_id": 0,
+  "quantization": {
+    "group_size": 64,
+    "bits": 8,
+    "mode": "affine",
+    "model.layers.2.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.3.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.4.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.5.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.6.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.7.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.8.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.9.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.10.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.11.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.12.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.13.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.14.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.15.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.16.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.17.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.18.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.19.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.20.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.21.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.22.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.23.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    }
+  },
+  "quantization_config": {
+    "group_size": 64,
+    "bits": 8,
+    "mode": "affine",
+    "model.layers.2.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.3.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.4.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.5.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.6.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.7.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.8.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.9.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.10.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.11.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.12.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.13.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.14.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.15.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.16.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.17.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.18.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.19.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.20.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.21.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.22.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    },
+    "model.layers.23.feed_forward.gate": {
+      "group_size": 64,
+      "bits": 8
+    }
+  },
+  "rope_theta": 1000000.0,
+  "routed_scaling_factor": 1.0,
+  "transformers_version": "4.56.1",
+  "use_cache": true,
+  "use_expert_bias": true,
+  "vocab_size": 65536
+}
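
The paired "quantization" and "quantization_config" blocks, with their group_size/bits/mode keys and per-layer gate overrides, match the format written by the mlx-lm converter rather than a transformers quantization scheme, so this appears to be an 8-bit MLX checkpoint. A minimal usage sketch under that assumption; the repo id below is a placeholder, since the repository name is not shown on this page.

from mlx_lm import load, generate

# Assumes this config belongs to an MLX-converted repo on the Hub (placeholder id).
model, tokenizer = load("introvoyz041/<model-repo>")
print(generate(model, tokenizer, prompt="Hello", max_tokens=64))

Note that the config also keeps the transformers auto_map entries (configuration_lfm2_moe.Lfm2MoeConfig, modeling_lfm2_moe.Lfm2MoeForCausalLM), so the architecture definition is expected to be resolved from custom code shipped alongside the checkpoint.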