bibproj committed on
Commit
e12510c
·
verified ·
1 Parent(s): 07be91a
Files changed (2) hide show
  1. config.json +4 -4
  2. model.safetensors.index.json +0 -0
config.json CHANGED
@@ -144,7 +144,7 @@
144
  },
145
  "model.layers.1.mlp.switch_mlp.down_proj": {
146
  "group_size": 64,
147
- "bits": 3
148
  },
149
  "model.layers.1.mlp.shared_experts.gate_proj": {
150
  "group_size": 64,
@@ -188,7 +188,7 @@
188
  },
189
  "model.layers.2.mlp.switch_mlp.down_proj": {
190
  "group_size": 64,
191
- "bits": 3
192
  },
193
  "model.layers.2.mlp.shared_experts.gate_proj": {
194
  "group_size": 64,
@@ -2828,7 +2828,7 @@
2828
  },
2829
  "model.layers.1.mlp.switch_mlp.down_proj": {
2830
  "group_size": 64,
2831
- "bits": 3
2832
  },
2833
  "model.layers.1.mlp.shared_experts.gate_proj": {
2834
  "group_size": 64,
@@ -2872,7 +2872,7 @@
2872
  },
2873
  "model.layers.2.mlp.switch_mlp.down_proj": {
2874
  "group_size": 64,
2875
- "bits": 3
2876
  },
2877
  "model.layers.2.mlp.shared_experts.gate_proj": {
2878
  "group_size": 64,
 
144
  },
145
  "model.layers.1.mlp.switch_mlp.down_proj": {
146
  "group_size": 64,
147
+ "bits": 6
148
  },
149
  "model.layers.1.mlp.shared_experts.gate_proj": {
150
  "group_size": 64,
 
188
  },
189
  "model.layers.2.mlp.switch_mlp.down_proj": {
190
  "group_size": 64,
191
+ "bits": 6
192
  },
193
  "model.layers.2.mlp.shared_experts.gate_proj": {
194
  "group_size": 64,
 
2828
  },
2829
  "model.layers.1.mlp.switch_mlp.down_proj": {
2830
  "group_size": 64,
2831
+ "bits": 6
2832
  },
2833
  "model.layers.1.mlp.shared_experts.gate_proj": {
2834
  "group_size": 64,
 
2872
  },
2873
  "model.layers.2.mlp.switch_mlp.down_proj": {
2874
  "group_size": 64,
2875
+ "bits": 6
2876
  },
2877
  "model.layers.2.mlp.shared_experts.gate_proj": {
2878
  "group_size": 64,
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff