leejunhyeok committed on
Commit
a95fee7
·
verified ·
1 Parent(s): 14c71d5

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -6
config.json CHANGED
@@ -18,12 +18,6 @@
18
  "max_position_embeddings": 16384,
19
  "max_window_layers": 28,
20
  "model_type": "Motif",
21
- "moe_intermediate_size": null,
22
- "moe_layer": false,
23
- "muP": false,
24
- "multi_token_heads": null,
25
- "n_group": null,
26
- "n_routed_experts": null,
27
  "norm_topk_prob": null,
28
  "num_attention_heads": 16,
29
  "num_hidden_layers": 32,
 
18
  "max_position_embeddings": 16384,
19
  "max_window_layers": 28,
20
  "model_type": "Motif",
 
 
 
 
 
 
21
  "norm_topk_prob": null,
22
  "num_attention_heads": 16,
23
  "num_hidden_layers": 32,