add tokenizer
- special_tokens_map.json +1 -5
- spiece.model +2 -2
- tokenizer.json +0 -0
- tokenizer_config.json +1 -9
special_tokens_map.json
CHANGED
@@ -1,5 +1 @@
-{
-  "eos_token": "</s>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
-}
+{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
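For reference, a minimal sketch of how these special tokens surface once the tokenizer is loaded; the repo path below is a placeholder, not the actual model id:

```python
# Minimal sketch, assuming a local checkout at this placeholder path.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/enrut5-base")

# These values are read from special_tokens_map.json:
print(tokenizer.eos_token)  # "</s>"
print(tokenizer.pad_token)  # "<pad>"
print(tokenizer.unk_token)  # "<unk>"
```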
spiece.model
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c5e0e5c741793a5698e41e8c4389ee638893286e2ba2f530bedffaae215441ec
+size 1018462
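spiece.model is stored through Git LFS, so the file checked into the repo is only a pointer recording the blob's SHA-256 and size. A hedged sketch of verifying a downloaded copy against the pointer above:

```python
# Sketch: check a downloaded spiece.model against the LFS pointer.
# The EXPECTED_* values are copied from the pointer lines above.
import hashlib
import os

EXPECTED_SHA256 = "c5e0e5c741793a5698e41e8c4389ee638893286e2ba2f530bedffaae215441ec"
EXPECTED_SIZE = 1018462

def matches_pointer(path: str) -> bool:
    # Cheap size check first, then the full content hash.
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_SHA256

print(matches_pointer("spiece.model"))
```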
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
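Adding tokenizer.json lets the Rust-backed "fast" tokenizer load directly, instead of converting the SentencePiece model on the fly. A sketch, again with a placeholder path:

```python
# Sketch: tokenizer.json enables the fast (Rust-backed) tokenizer.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/enrut5-base", use_fast=True)
print(tok.is_fast)  # True once tokenizer.json is present
```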
tokenizer_config.json
CHANGED
@@ -1,9 +1 @@
-{
-  "additional_special_tokens": null,
-  "eos_token": "</s>",
-  "extra_ids": 0,
-  "pad_token": "<pad>",
-  "sp_model_kwargs": {},
-  "tokenizer_class": "T5Tokenizer",
-  "unk_token": "<unk>"
-}
+{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 0, "additional_special_tokens": null, "sp_model_kwargs": {}, "special_tokens_map_file": "enrut5-base/special_tokens_map.json", "name_or_path": "enrut5-base/checkpoints/20220810-074144", "tokenizer_class": "T5Tokenizer"}
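One detail worth noting: extra_ids is 0, so unlike stock T5 (which appends 100 <extra_id_N> sentinel tokens by default) this vocabulary carries no sentinels. A sketch of what that means in practice, placeholder path as before:

```python
# Sketch: with extra_ids=0 the tokenizer defines no <extra_id_N> sentinels.
from transformers import T5Tokenizer

tok = T5Tokenizer.from_pretrained("path/to/enrut5-base")
print([t for t in tok.additional_special_tokens if "extra_id" in t])  # []
```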