Add files using upload-large-folder tool

- config.json +3 -2
- generation_config.json +1 -1

config.json
CHANGED
@@ -1,4 +1,5 @@
 {
+  "_attn_implementation_autoset": true,
   "architectures": [
     "Llama4ForCausalLM"
   ],
@@ -8,6 +9,7 @@
   "attn_scale": 0.1,
   "attn_temperature_tuning": 4,
   "bos_token_id": 200000,
+  "dtype": "float32",
   "eos_token_id": [
     200001,
     200007,
@@ -64,8 +66,7 @@
   "router_aux_loss_coef": 0.001,
   "router_jitter_noise": 0.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.53.2",
+  "transformers_version": "4.57.3",
   "use_cache": true,
   "use_qk_norm": true,
   "vocab_size": 202048
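For reference, a minimal sketch (not part of the commit) that checks the fields this diff touches against a local checkout; the relative path "config.json" assumes the script runs from the repo root. The removed "torch_dtype" key appears to reflect the rename to "dtype" in newer transformers releases.

# Minimal sketch: verify the updated config.json fields locally.
import json

with open("config.json") as f:
    cfg = json.load(f)

# Fields touched by this commit:
assert cfg["_attn_implementation_autoset"] is True
assert cfg["dtype"] == "float32"            # older releases wrote "torch_dtype"
assert cfg["transformers_version"] == "4.57.3"
print(cfg["architectures"], cfg["vocab_size"])  # ['Llama4ForCausalLM'] 202048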
generation_config.json
CHANGED

@@ -7,5 +7,5 @@
     200008
   ],
   "pad_token_id": 200018,
-  "transformers_version": "4.53.2"
+  "transformers_version": "4.57.3"
 }
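Likewise, a minimal sketch (again, not part of the commit) of loading the updated generation config through transformers; the path "." assumes the working directory is the repo root.

# Minimal sketch: load generation_config.json via transformers.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(".")
print(gen.pad_token_id)    # 200018
print(gen.eos_token_id)    # list of eos ids, including 200008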