Make sure the test beam size is stored
Browse files — config.json (+5 −0)
config.json
CHANGED
@@ -51,6 +51,11 @@
   "no_repeat_ngram_size": 3,
   "num_hidden_layers": 6,
   "pad_token_id": 1,
+  "task_specific_params": {
+    "summarization": {
+      "num_beams": 5
+    }
+  },
   "torch_dtype": "float32",
   "transformers_version": "4.19.2",
   "use_cache": true,