nc-ai-consortium committed on
Commit
93dfe91
·
verified ·
1 Parent(s): 62c1061

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -5
config.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "architectures": [
3
- "WBLForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
- "AutoConfig": "configuration_wbl.WBLConfig",
9
- "AutoModel": "modeling_wbl.WBLModel",
10
- "AutoModelForCausalLM": "modeling_wbl.WBLForCausalLM"
11
  },
12
  "bos_token_id": 137204,
13
  "dtype": "bfloat16",
@@ -70,7 +70,7 @@
70
  "full_attention"
71
  ],
72
  "max_position_embeddings": 131072,
73
- "model_type": "wbl",
74
  "moe_intermediate_size": 2048,
75
  "n_routed_experts": 128,
76
  "n_shared_experts": 1,
 
1
  {
2
  "architectures": [
3
+ "VaetkiForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
8
+ "AutoConfig": "configuration_vaetki.VaetkiConfig",
9
+ "AutoModel": "modeling_vaetki.VaetkiModel",
10
+ "AutoModelForCausalLM": "modeling_vaetki.VaetkiForCausalLM"
11
  },
12
  "bos_token_id": 137204,
13
  "dtype": "bfloat16",
 
70
  "full_attention"
71
  ],
72
  "max_position_embeddings": 131072,
73
+ "model_type": "vaetki",
74
  "moe_intermediate_size": 2048,
75
  "n_routed_experts": 128,
76
  "n_shared_experts": 1,