jerrimu committed (verified)
Commit dbe30ed · 1 Parent(s): 7512721

Upload config.json

Files changed (1)
  1. config.json +52 -0
config.json ADDED
@@ -0,0 +1,52 @@
+{
+  "architectures": [
+    "Ernie4_5_MoeForCausalLM"
+  ],
+  "attention_probs_dropout_prob": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_dropout_prob": 0.0,
+  "hidden_size": 2560,
+  "ignored_index": -100,
+  "intermediate_size": 12288,
+  "max_position_embeddings": 131072,
+  "max_sequence_length": null,
+  "model_type": "ernie4_5_moe",
+  "moe_capacity": [
+    52,
+    52,
+    52
+  ],
+  "moe_dropout_prob": 0.0,
+  "moe_gate": "top2",
+  "moe_gate_act": "softmax",
+  "moe_intermediate_size": 1536,
+  "moe_k": 6,
+  "moe_layer_end_index": 27,
+  "moe_layer_interval": 1,
+  "moe_layer_start_index": 1,
+  "moe_num_experts": 52,
+  "moe_num_shared_experts": 2,
+  "moe_use_aux_free": true,
+  "multi_token_pred_lambda": 1.0,
+  "num_attention_heads": 20,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "num_nextn_predict_layers": 1,
+  "pad_token_id": 0,
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 500000,
+  "sinkhorn_2gate": true,
+  "sinkhorn_temp": 0.03,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.55.0",
+  "use_bias": false,
+  "use_cache": false,
+  "use_flash_attention": false,
+  "use_moe": true,
+  "use_rmsnorm": true,
+  "vocab_size": 103424,
+  "weight_share_add_bias": true
+}
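
For quick inspection, a minimal sketch of reading this config with the Hugging Face transformers AutoConfig API. It assumes the uploaded file has been saved locally as ./config.json and that the installed transformers release registers the ernie4_5_moe model type (the config above records 4.55.0); the field values in the comments are taken from the JSON above.

# Minimal sketch: inspect the uploaded config with Hugging Face transformers.
# Assumes ./config.json is this file and that the installed transformers
# release registers the "ernie4_5_moe" model type (config records 4.55.0).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./config.json")

# Key fields from the config above.
print(config.model_type)               # ernie4_5_moe
print(config.hidden_size)              # 2560
print(config.num_hidden_layers)        # 28
print(config.moe_num_experts)          # 52 routed experts per MoE layer
print(config.moe_num_shared_experts)   # 2 shared experts
print(config.moe_k)                    # 6 routed experts selected per token
print(config.max_position_embeddings)  # 131072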