{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 0,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 1,
      "content": "<bos>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 2,
      "content": "<eos>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 3,
      "content": "<unk>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": {
    "type": "Split",
    "pattern": {
      "Regex": "[\\s\\S]"
    },
    "behavior": "Isolated",
    "invert": false
  },
  "post_processor": null,
  "decoder": {
    "type": "Fuse"
  },
  "model": {
    "type": "WordLevel",
    "vocab": {
      "<pad>": 0,
      "<bos>": 1,
      "<eos>": 2,
      "<unk>": 3,
      "\n": 4,
      "%": 5,
      "*": 6,
      "+": 7,
      "-": 8,
      "/": 9,
      "0": 10,
      "1": 11,
      "2": 12,
      "3": 13,
      "4": 14,
      "5": 15,
      "6": 16,
      "7": 17,
      "8": 18,
      "9": 19,
      "=": 20
    },
    "unk_token": "<unk>"
  }
}