Upload Qwen3ForCausalLM
- config.json +1 -1
- generation_config.json +1 -1
config.json CHANGED
@@ -23,7 +23,7 @@
   "sliding_window": null,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.52.1",
   "use_cache": false,
   "use_sliding_window": false,
   "vocab_size": 151936
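This change pins the `transformers_version` recorded in config.json to 4.52.1. A minimal sketch of inspecting these fields after download, assuming a hypothetical repo id "Qwen/Qwen3-0.6B" (substitute the actual repository this commit belongs to):

from transformers import AutoConfig

# Load the model config from the Hub; the repo id here is an
# assumption for illustration, not taken from this commit.
config = AutoConfig.from_pretrained("Qwen/Qwen3-0.6B")

print(config.torch_dtype)           # "bfloat16" (may print as torch.bfloat16 depending on version)
print(config.vocab_size)            # 151936
print(config.transformers_version)  # "4.52.1" after this commit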
generation_config.json CHANGED
@@ -9,5 +9,5 @@
   "temperature": 0.6,
   "top_k": 20,
   "top_p": 0.95,
-  "transformers_version": "4.
+  "transformers_version": "4.52.1"
 }
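The sampling defaults in generation_config.json (temperature 0.6, top_k 20, top_p 0.95) are what `model.generate()` falls back to when no overrides are passed. A minimal sketch of reading them, again assuming the hypothetical repo id "Qwen/Qwen3-0.6B":

from transformers import GenerationConfig

# GenerationConfig.from_pretrained reads generation_config.json from the repo.
gen_config = GenerationConfig.from_pretrained("Qwen/Qwen3-0.6B")

print(gen_config.temperature)  # 0.6
print(gen_config.top_k)        # 20
print(gen_config.top_p)        # 0.95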