Update config.json
config.json CHANGED (+5 -5)
@@ -1,13 +1,13 @@
 {
   "architectures": [
-    "
+    "VaetkiForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "
-    "AutoModel": "
-    "AutoModelForCausalLM": "
+    "AutoConfig": "configuration_vaetki.VaetkiConfig",
+    "AutoModel": "modeling_vaetki.VaetkiModel",
+    "AutoModelForCausalLM": "modeling_vaetki.VaetkiForCausalLM"
   },
   "bos_token_id": 137204,
   "dtype": "bfloat16",
@@ -70,7 +70,7 @@
     "full_attention"
   ],
   "max_position_embeddings": 131072,
-  "model_type": "
+  "model_type": "vaetki",
   "moe_intermediate_size": 2048,
   "n_routed_experts": 128,
   "n_shared_experts": 1,
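The auto_map block added here is what lets the transformers Auto* classes resolve the custom Vaetki classes from the configuration_vaetki.py and modeling_vaetki.py files shipped alongside this config, which requires the caller to opt in with trust_remote_code=True. A minimal loading sketch, assuming the placeholder repository id "example-org/vaetki" (not the actual repo name):

# Minimal sketch: loading a checkpoint whose config.json carries an auto_map.
# "example-org/vaetki" is a placeholder repo id, not taken from the source.
from transformers import AutoConfig, AutoModelForCausalLM

repo = "example-org/vaetki"

# trust_remote_code=True is required: auto_map points transformers at the
# configuration_vaetki.py / modeling_vaetki.py modules inside the repository.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
print(config.model_type)  # -> "vaetki"

model = AutoModelForCausalLM.from_pretrained(
    repo,
    trust_remote_code=True,
    torch_dtype="bfloat16",  # matches the "dtype": "bfloat16" field above
)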