Upload folder using huggingface_hub

- config.json +2 -1
- generation_config.json +1 -1
- model.safetensors +1 -1
- tokenizer.json +0 -0
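The commit message above is the default one written by huggingface_hub's upload_folder helper. A minimal sketch of how a commit like this could be produced, assuming a hypothetical repo id and local folder path (neither appears in this diff):

    from huggingface_hub import upload_folder

    # Hypothetical values for illustration; the actual namespace and local
    # path are not part of this commit.
    upload_folder(
        repo_id="your-username/whisper-to-oliver",
        folder_path="./whisper-to-oliver",
        commit_message="Upload folder using huggingface_hub",
    )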
config.json CHANGED

@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "whisper-to-oliver",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -43,7 +44,7 @@
   "scale_embedding": false,
   "task": "transcribe",
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.46.3",
   "use_cache": false,
   "use_weighted_layer_sum": false,
   "vocab_size": 51866
generation_config.json CHANGED

@@ -247,6 +247,6 @@
     "transcribe": 50360,
     "translate": 50359
   },
-  "transformers_version": "4.
+  "transformers_version": "4.46.3",
   "use_cache": false
 }
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a9d7e0ccf1acbf472196db1685e1fe971763e9117d67355de92231cd92661127
 size 1617824864
tokenizer.json CHANGED

The diff for this file is too large to render.
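For completeness, a checkpoint carrying this config (float16 weights, "transcribe" task, transformers_version pinned at 4.46.3) could be loaded along the following lines. This is only a sketch: the repo id is a placeholder derived from the "_name_or_path" above, and it assumes the repo also ships the processor/tokenizer files.

    import torch
    from transformers import WhisperForConditionalGeneration, WhisperProcessor

    # Placeholder repo id; the owning namespace is not part of this diff.
    repo_id = "your-username/whisper-to-oliver"

    processor = WhisperProcessor.from_pretrained(repo_id)
    model = WhisperForConditionalGeneration.from_pretrained(
        repo_id,
        torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
    )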