Upload tokenizer
Browse files
- tokenizer.json +1 -0
- tokenizer_config.json +1 -0
tokenizer.json
CHANGED
|
@@ -382,6 +382,7 @@
|
|
| 382 |
"end_of_word_suffix": "",
|
| 383 |
"fuse_unk": false,
|
| 384 |
"byte_fallback": false,
|
|
|
|
| 385 |
"vocab": {
|
| 386 |
"!": 0,
|
| 387 |
"\"": 1,
|
|
|
|
| 382 |
"end_of_word_suffix": "",
|
| 383 |
"fuse_unk": false,
|
| 384 |
"byte_fallback": false,
|
| 385 |
+ "ignore_merges": false,
|
| 386 |
"vocab": {
|
| 387 |
"!": 0,
|
| 388 |
"\"": 1,
|
tokenizer_config.json
CHANGED
|
@@ -319,6 +319,7 @@
|
|
| 319 |
"eos_token": "<|endoftext|>",
|
| 320 |
"model_max_length": 2048,
|
| 321 |
"pad_token": "<|endoftext|>",
|
|
|
|
| 322 |
"tokenizer_class": "CodeGenTokenizer",
|
| 323 |
"unk_token": "<|endoftext|>"
|
| 324 |
}
|
|
|
|
| 319 |
"eos_token": "<|endoftext|>",
|
| 320 |
"model_max_length": 2048,
|
| 321 |
"pad_token": "<|endoftext|>",
|
| 322 |
+ "return_token_type_ids": false,
|
| 323 |
"tokenizer_class": "CodeGenTokenizer",
|
| 324 |
"unk_token": "<|endoftext|>"
|
| 325 |
}
|