Echo9Zulu committed on
Commit 2df89d0 · verified · 1 Parent(s): 0ca76ab

Upload 12 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
chat_template.jinja ADDED
@@ -0,0 +1,51 @@
+ {%- set today = strftime_now("%Y-%m-%d") %}
+ {%- set default_system_message = "You are Mistral Small 3, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\nYour knowledge base was last updated on 2023-10-01. The current date is " + today + ".\n\nWhen you're not sure about some information, you say that you don't have the information and don't make up anything.\nIf the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \"What are some good restaurants around me?\" => \"Where are you?\" or \"When is the next flight to Tokyo\" => \"Where do you travel from?\")" %}
+
+ {{- bos_token }}
+
+ {%- if messages[0]['role'] == 'system' %}
+ {%- if messages[0]['content'] is string %}
+ {%- set system_message = messages[0]['content'] %}
+ {%- else %}
+ {%- set system_message = messages[0]['content'][0]['text'] %}
+ {%- endif %}
+ {%- set loop_messages = messages[1:] %}
+ {%- else %}
+ {%- set system_message = default_system_message %}
+ {%- set loop_messages = messages %}
+ {%- endif %}
+ {{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}
+
+ {%- for message in loop_messages %}
+ {%- if message['role'] == 'user' %}
+ {%- if message['content'] is string %}
+ {{- '[INST]' + message['content'] + '[/INST]' }}
+ {%- else %}
+ {{- '[INST]' }}
+ {%- for block in message['content'] %}
+ {%- if block['type'] == 'text' %}
+ {{- block['text'] }}
+ {%- elif block['type'] in ['image', 'image_url'] %}
+ {{- '[IMG]' }}
+ {%- else %}
+ {{- raise_exception('Only text and image blocks are supported in message content!') }}
+ {%- endif %}
+ {%- endfor %}
+ {{- '[/INST]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'system' %}
+ {%- if message['content'] is string %}
+ {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}
+ {%- else %}
+ {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'assistant' %}
+ {%- if message['content'] is string %}
+ {{- message['content'] + eos_token }}
+ {%- else %}
+ {{- message['content'][0]['text'] + eos_token }}
+ {%- endif %}
+ {%- else %}
+ {{- raise_exception('Only user, system and assistant roles are supported!') }}
+ {%- endif %}
+ {%- endfor %}
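
For reference, this appears to be the Mistral Small 3 chat template (per its default system message), and it can be rendered through the transformers chat-template API. A minimal sketch, assuming a local copy of this repository at a placeholder path and a transformers version recent enough to expose strftime_now inside chat templates:

from transformers import AutoTokenizer

# "path/to/this-model" is a placeholder for a local download of this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/this-model")

messages = [
    {"role": "system", "content": "You are a terse assistant."},
    {"role": "user", "content": "Name the capital of France."},
]

# tokenize=False returns the rendered prompt string, roughly:
# <s>[SYSTEM_PROMPT]You are a terse assistant.[/SYSTEM_PROMPT][INST]Name the capital of France.[/INST]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)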
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "dtype": "bfloat16",
+ "eos_token_id": 2,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 32768,
+ "max_position_embeddings": 131072,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.52.4",
+ "use_cache": true,
+ "vocab_size": 131072
+ }
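
As a rough sanity check on these hyperparameters, an approximate parameter count can be computed from the standard Mistral decoder layout (untied embeddings per "tie_word_embeddings": false; RMSNorm weights and the absence of biases are ignored). This is an estimate derived from the config, not a value read from the checkpoint:

# Approximate parameter count from the config above (estimate only).
hidden, inter, layers = 5120, 32768, 40
vocab, heads, kv_heads, head_dim = 131072, 32, 8, 128

attn = 2 * hidden * heads * head_dim        # q_proj + o_proj
attn += 2 * hidden * kv_heads * head_dim    # k_proj + v_proj
mlp = 3 * hidden * inter                    # gate, up and down projections
embed = 2 * vocab * hidden                  # input embeddings + lm_head (untied)

total = layers * (attn + mlp) + embed
print(f"~{total / 1e9:.1f}B parameters")    # ~23.6B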
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "do_sample": true,
+ "eos_token_id": 2,
+ "transformers_version": "4.52.4"
+ }
openvino_detokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15b0bc5c8b7fe3517509f9d9942b3933902543f032c5fb67c87525c49719f302
+ size 1943694
openvino_detokenizer.xml ADDED
@@ -0,0 +1,220 @@
1
+ <?xml version="1.0"?>
2
+ <net name="detokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_116781" type="Parameter" version="opset1">
5
+ <data shape="?,?" element_type="i64" />
6
+ <output>
7
+ <port id="0" precision="I64" names="Parameter_116781">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ </port>
11
+ </output>
12
+ </layer>
13
+ <layer id="1" name="Convert_116980" type="Convert" version="opset1">
14
+ <data destination_type="i32" />
15
+ <input>
16
+ <port id="0" precision="I64">
17
+ <dim>-1</dim>
18
+ <dim>-1</dim>
19
+ </port>
20
+ </input>
21
+ <output>
22
+ <port id="1" precision="I32">
23
+ <dim>-1</dim>
24
+ <dim>-1</dim>
25
+ </port>
26
+ </output>
27
+ </layer>
28
+ <layer id="2" name="Constant_116783" type="Const" version="opset1">
29
+ <data element_type="i32" shape="131072" offset="0" size="524288" />
30
+ <output>
31
+ <port id="0" precision="I32">
32
+ <dim>131072</dim>
33
+ </port>
34
+ </output>
35
+ </layer>
36
+ <layer id="3" name="Constant_116785" type="Const" version="opset1">
37
+ <data element_type="i32" shape="131072" offset="524288" size="524288" />
38
+ <output>
39
+ <port id="0" precision="I32">
40
+ <dim>131072</dim>
41
+ </port>
42
+ </output>
43
+ </layer>
44
+ <layer id="4" name="Constant_116787" type="Const" version="opset1">
45
+ <data element_type="u8" shape="891118" offset="1048576" size="891118" />
46
+ <output>
47
+ <port id="0" precision="U8">
48
+ <dim>891118</dim>
49
+ </port>
50
+ </output>
51
+ </layer>
52
+ <layer id="5" name="Slice_116792" type="Const" version="opset1">
53
+ <data element_type="i32" shape="1000" offset="1939694" size="4000" />
54
+ <output>
55
+ <port id="0" precision="I32">
56
+ <dim>1000</dim>
57
+ </port>
58
+ </output>
59
+ </layer>
60
+ <layer id="6" name="VocabDecoder_116794" type="VocabDecoder" version="extension">
61
+ <data skip_tokens="" />
62
+ <input>
63
+ <port id="0" precision="I32">
64
+ <dim>-1</dim>
65
+ <dim>-1</dim>
66
+ </port>
67
+ <port id="1" precision="I32">
68
+ <dim>131072</dim>
69
+ </port>
70
+ <port id="2" precision="I32">
71
+ <dim>131072</dim>
72
+ </port>
73
+ <port id="3" precision="U8">
74
+ <dim>891118</dim>
75
+ </port>
76
+ <port id="4" precision="I32">
77
+ <dim>1000</dim>
78
+ </port>
79
+ </input>
80
+ <output>
81
+ <port id="5" precision="I32">
82
+ <dim>-1</dim>
83
+ </port>
84
+ <port id="6" precision="I32">
85
+ <dim>-1</dim>
86
+ </port>
87
+ <port id="7" precision="I32">
88
+ <dim>-1</dim>
89
+ </port>
90
+ <port id="8" precision="I32">
91
+ <dim>-1</dim>
92
+ </port>
93
+ <port id="9" precision="U8">
94
+ <dim>-1</dim>
95
+ </port>
96
+ </output>
97
+ </layer>
98
+ <layer id="7" name="FuzeRagged_116795" type="FuzeRagged" version="extension">
99
+ <input>
100
+ <port id="0" precision="I32">
101
+ <dim>-1</dim>
102
+ </port>
103
+ <port id="1" precision="I32">
104
+ <dim>-1</dim>
105
+ </port>
106
+ <port id="2" precision="I32">
107
+ <dim>-1</dim>
108
+ </port>
109
+ <port id="3" precision="I32">
110
+ <dim>-1</dim>
111
+ </port>
112
+ </input>
113
+ <output>
114
+ <port id="4" precision="I32">
115
+ <dim>-1</dim>
116
+ </port>
117
+ <port id="5" precision="I32">
118
+ <dim>-1</dim>
119
+ </port>
120
+ </output>
121
+ </layer>
122
+ <layer id="8" name="UTF8Validate_116796" type="UTF8Validate" version="extension">
123
+ <data replace_mode="true" />
124
+ <input>
125
+ <port id="0" precision="I32">
126
+ <dim>-1</dim>
127
+ </port>
128
+ <port id="1" precision="I32">
129
+ <dim>-1</dim>
130
+ </port>
131
+ <port id="2" precision="U8">
132
+ <dim>-1</dim>
133
+ </port>
134
+ </input>
135
+ <output>
136
+ <port id="3" precision="I32">
137
+ <dim>-1</dim>
138
+ </port>
139
+ <port id="4" precision="I32">
140
+ <dim>-1</dim>
141
+ </port>
142
+ <port id="5" precision="U8">
143
+ <dim>-1</dim>
144
+ </port>
145
+ </output>
146
+ </layer>
147
+ <layer id="9" name="StringTensorPack_116797" type="StringTensorPack" version="opset15">
148
+ <input>
149
+ <port id="0" precision="I32">
150
+ <dim>-1</dim>
151
+ </port>
152
+ <port id="1" precision="I32">
153
+ <dim>-1</dim>
154
+ </port>
155
+ <port id="2" precision="U8">
156
+ <dim>-1</dim>
157
+ </port>
158
+ </input>
159
+ <output>
160
+ <port id="3" precision="STRING" names="Result_116798,string_output">
161
+ <dim>-1</dim>
162
+ </port>
163
+ </output>
164
+ </layer>
165
+ <layer id="10" name="Result_116798" type="Result" version="opset1" output_names="Result_116798,string_output">
166
+ <input>
167
+ <port id="0" precision="STRING">
168
+ <dim>-1</dim>
169
+ </port>
170
+ </input>
171
+ </layer>
172
+ </layers>
173
+ <edges>
174
+ <edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
175
+ <edge from-layer="1" from-port="1" to-layer="6" to-port="0" />
176
+ <edge from-layer="2" from-port="0" to-layer="6" to-port="1" />
177
+ <edge from-layer="3" from-port="0" to-layer="6" to-port="2" />
178
+ <edge from-layer="4" from-port="0" to-layer="6" to-port="3" />
179
+ <edge from-layer="5" from-port="0" to-layer="6" to-port="4" />
180
+ <edge from-layer="6" from-port="7" to-layer="7" to-port="2" />
181
+ <edge from-layer="6" from-port="9" to-layer="8" to-port="2" />
182
+ <edge from-layer="6" from-port="8" to-layer="7" to-port="3" />
183
+ <edge from-layer="6" from-port="6" to-layer="7" to-port="1" />
184
+ <edge from-layer="6" from-port="5" to-layer="7" to-port="0" />
185
+ <edge from-layer="7" from-port="4" to-layer="8" to-port="0" />
186
+ <edge from-layer="7" from-port="5" to-layer="8" to-port="1" />
187
+ <edge from-layer="8" from-port="3" to-layer="9" to-port="0" />
188
+ <edge from-layer="8" from-port="4" to-layer="9" to-port="1" />
189
+ <edge from-layer="8" from-port="5" to-layer="9" to-port="2" />
190
+ <edge from-layer="9" from-port="3" to-layer="10" to-port="0" />
191
+ </edges>
192
+ <rt_info>
193
+ <add_attention_mask value="True" />
194
+ <add_prefix_space />
195
+ <add_special_tokens value="True" />
196
+ <bos_token_id value="1" />
197
+ <chat_template value="{%- set today = strftime_now(&quot;%Y-%m-%d&quot;) %}&#10;{%- set default_system_message = &quot;You are Mistral Small 3, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\nYour knowledge base was last updated on 2023-10-01. The current date is &quot; + today + &quot;.\n\nWhen you're not sure about some information, you say that you don't have the information and don't make up anything.\nIf the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \&quot;What are some good restaurants around me?\&quot; => \&quot;Where are you?\&quot; or \&quot;When is the next flight to Tokyo\&quot; => \&quot;Where do you travel from?\&quot;)&quot; %}&#10;&#10;{{- bos_token }}&#10;&#10;{%- if messages[0]['role'] == 'system' %}&#10; {%- if messages[0]['content'] is string %}&#10; {%- set system_message = messages[0]['content'] %}&#10; {%- else %}&#10; {%- set system_message = messages[0]['content'][0]['text'] %}&#10; {%- endif %}&#10; {%- set loop_messages = messages[1:] %}&#10;{%- else %}&#10; {%- set system_message = default_system_message %}&#10; {%- set loop_messages = messages %}&#10;{%- endif %}&#10;{{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}&#10;&#10;{%- for message in loop_messages %}&#10; {%- if message['role'] == 'user' %}&#10; {%- if message['content'] is string %}&#10; {{- '[INST]' + message['content'] + '[/INST]' }}&#10; {%- else %}&#10; {{- '[INST]' }}&#10; {%- for block in message['content'] %}&#10; {%- if block['type'] == 'text' %}&#10; {{- block['text'] }}&#10; {%- elif block['type'] in ['image', 'image_url'] %}&#10; {{- '[IMG]' }}&#10; {%- else %}&#10; {{- raise_exception('Only text and image blocks are supported in message content!') }}&#10; {%- endif %}&#10; {%- endfor %}&#10; {{- '[/INST]' }}&#10; {%- endif %}&#10; {%- elif message['role'] == 'system' %}&#10; {%- if message['content'] is string %}&#10; {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}&#10; {%- else %}&#10; {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}&#10; {%- endif %}&#10; {%- elif message['role'] == 'assistant' %}&#10; {%- if message['content'] is string %}&#10; {{- message['content'] + eos_token }}&#10; {%- else %}&#10; {{- message['content'][0]['text'] + eos_token }}&#10; {%- endif %}&#10; {%- else %}&#10; {{- raise_exception('Only user, system and assistant roles are supported!') }}&#10; {%- endif %}&#10;{%- endfor %}" />
198
+ <clean_up_tokenization_spaces />
199
+ <detokenizer_input_type value="i64" />
200
+ <eos_token_id value="2" />
201
+ <handle_special_tokens_with_re />
202
+ <max_length />
203
+ <number_of_inputs value="1" />
204
+ <openvino_tokenizers_version value="2025.3.0.0-598-57f278c8468" />
205
+ <openvino_version value="2025.3.0-19807-44526285f24-releases/2025/3" />
206
+ <original_post_processor_template value="{&quot;type&quot;: &quot;TemplateProcessing&quot;, &quot;single&quot;: [{&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 0}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;A&quot;, &quot;type_id&quot;: 0}}], &quot;pair&quot;: [{&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 0}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;A&quot;, &quot;type_id&quot;: 0}}, {&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 1}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;B&quot;, &quot;type_id&quot;: 1}}], &quot;special_tokens&quot;: {&quot;&lt;s>&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;ids&quot;: [1], &quot;tokens&quot;: [&quot;&lt;s>&quot;]}}}" />
207
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
208
+ <pad_token_id value="11" />
209
+ <processed_post_processor_template value="{&quot;single&quot;: {&quot;ids&quot;: [1, -1], &quot;type_ids&quot;: [0, 0]}, &quot;pair&quot;: {&quot;ids&quot;: [1, -1, 1, -2], &quot;type_ids&quot;: [0, 0, 1, 1]}}" />
210
+ <skip_special_tokens value="True" />
211
+ <streaming_detokenizer value="False" />
212
+ <tokenizer_output_type value="i64" />
213
+ <tokenizers_version value="0.21.2" />
214
+ <transformers_version value="4.52.4" />
215
+ <use_max_padding value="False" />
216
+ <use_sentencepiece_backend value="False" />
217
+ <utf8_replace_mode value="replace" />
218
+ <with_detokenizer value="True" />
219
+ </rt_info>
220
+ </net>
openvino_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d63a854f26e249826ad4ce8534e2144d9bcf58446a7b507bcf9246b6465d76e
+ size 12893704796
openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
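
openvino_model.xml and openvino_model.bin are the OpenVINO IR of the language model itself. A minimal loading sketch with optimum-intel follows; the directory path is a placeholder for a local copy of this repository, and CPU is only an example device:

from optimum.intel import OVModelForCausalLM
from transformers import AutoTokenizer

model_dir = "path/to/this-model"  # placeholder: local download of this repository

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = OVModelForCausalLM.from_pretrained(model_dir, device="CPU")

# Build the prompt with the chat template shipped in this commit.
messages = [{"role": "user", "content": "Name the capital of France."}]
inputs = tokenizer.apply_chat_template(messages, return_tensors="pt")
outputs = model.generate(inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))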
 
openvino_tokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9a1d40a0dc8ebf7db17d2ca7d7b99e3219fea54c8fb27ed07071dc430217d0f
+ size 8151022
openvino_tokenizer.xml ADDED
@@ -0,0 +1,764 @@
1
+ <?xml version="1.0"?>
2
+ <net name="tokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_116651" type="Parameter" version="opset1">
5
+ <data shape="?" element_type="string" />
6
+ <output>
7
+ <port id="0" precision="STRING" names="Parameter_116651">
8
+ <dim>-1</dim>
9
+ </port>
10
+ </output>
11
+ </layer>
12
+ <layer id="1" name="Constant_116765" type="Const" version="opset1">
13
+ <data element_type="i32" shape="" offset="0" size="4" />
14
+ <output>
15
+ <port id="0" precision="I32" />
16
+ </output>
17
+ </layer>
18
+ <layer id="2" name="Constant_116766" type="Const" version="opset1">
19
+ <data element_type="i32" shape="" offset="4" size="4" />
20
+ <output>
21
+ <port id="0" precision="I32" />
22
+ </output>
23
+ </layer>
24
+ <layer id="3" name="Constant_116767" type="Const" version="opset1">
25
+ <data element_type="i32" shape="1" offset="4" size="4" />
26
+ <output>
27
+ <port id="0" precision="I32">
28
+ <dim>1</dim>
29
+ </port>
30
+ </output>
31
+ </layer>
32
+ <layer id="4" name="Constant_116657" type="Const" version="opset1">
33
+ <data element_type="i64" shape="" offset="8" size="8" />
34
+ <output>
35
+ <port id="0" precision="I64" />
36
+ </output>
37
+ </layer>
38
+ <layer id="5" name="StringTensorUnpack_116652" type="StringTensorUnpack" version="opset15">
39
+ <input>
40
+ <port id="0" precision="STRING">
41
+ <dim>-1</dim>
42
+ </port>
43
+ </input>
44
+ <output>
45
+ <port id="1" precision="I32">
46
+ <dim>-1</dim>
47
+ </port>
48
+ <port id="2" precision="I32">
49
+ <dim>-1</dim>
50
+ </port>
51
+ <port id="3" precision="U8">
52
+ <dim>-1</dim>
53
+ </port>
54
+ </output>
55
+ </layer>
56
+ <layer id="6" name="ShapeOf_116653" type="ShapeOf" version="opset3">
57
+ <data output_type="i64" />
58
+ <input>
59
+ <port id="0" precision="I32">
60
+ <dim>-1</dim>
61
+ </port>
62
+ </input>
63
+ <output>
64
+ <port id="1" precision="I64">
65
+ <dim>1</dim>
66
+ </port>
67
+ </output>
68
+ </layer>
69
+ <layer id="7" name="Constant_116654" type="Const" version="opset1">
70
+ <data element_type="i64" shape="" offset="8" size="8" />
71
+ <output>
72
+ <port id="0" precision="I64" />
73
+ </output>
74
+ </layer>
75
+ <layer id="8" name="Constant_116655" type="Const" version="opset1">
76
+ <data element_type="i64" shape="" offset="8" size="8" />
77
+ <output>
78
+ <port id="0" precision="I64" />
79
+ </output>
80
+ </layer>
81
+ <layer id="9" name="Gather_116656" type="Gather" version="opset8">
82
+ <data batch_dims="0" />
83
+ <input>
84
+ <port id="0" precision="I64">
85
+ <dim>1</dim>
86
+ </port>
87
+ <port id="1" precision="I64" />
88
+ <port id="2" precision="I64" />
89
+ </input>
90
+ <output>
91
+ <port id="3" precision="I64" />
92
+ </output>
93
+ </layer>
94
+ <layer id="10" name="Constant_116658" type="Const" version="opset1">
95
+ <data element_type="i64" shape="" offset="16" size="8" />
96
+ <output>
97
+ <port id="0" precision="I64" />
98
+ </output>
99
+ </layer>
100
+ <layer id="11" name="Range_116659" type="Range" version="opset4">
101
+ <data output_type="i32" />
102
+ <input>
103
+ <port id="0" precision="I64" />
104
+ <port id="1" precision="I64" />
105
+ <port id="2" precision="I64" />
106
+ </input>
107
+ <output>
108
+ <port id="3" precision="I32">
109
+ <dim>-1</dim>
110
+ </port>
111
+ </output>
112
+ </layer>
113
+ <layer id="12" name="Constant_116660" type="Const" version="opset1">
114
+ <data element_type="i64" shape="" offset="16" size="8" />
115
+ <output>
116
+ <port id="0" precision="I64" />
117
+ </output>
118
+ </layer>
119
+ <layer id="13" name="Constant_116661" type="Const" version="opset1">
120
+ <data element_type="i64" shape="" offset="16" size="8" />
121
+ <output>
122
+ <port id="0" precision="I64" />
123
+ </output>
124
+ </layer>
125
+ <layer id="14" name="Add_116662" type="Add" version="opset1">
126
+ <data auto_broadcast="numpy" />
127
+ <input>
128
+ <port id="0" precision="I64" />
129
+ <port id="1" precision="I64" />
130
+ </input>
131
+ <output>
132
+ <port id="2" precision="I64" />
133
+ </output>
134
+ </layer>
135
+ <layer id="15" name="Constant_116663" type="Const" version="opset1">
136
+ <data element_type="i64" shape="" offset="16" size="8" />
137
+ <output>
138
+ <port id="0" precision="I64" />
139
+ </output>
140
+ </layer>
141
+ <layer id="16" name="Range_116664" type="Range" version="opset4">
142
+ <data output_type="i32" />
143
+ <input>
144
+ <port id="0" precision="I64" />
145
+ <port id="1" precision="I64" />
146
+ <port id="2" precision="I64" />
147
+ </input>
148
+ <output>
149
+ <port id="3" precision="I32">
150
+ <dim>-1</dim>
151
+ </port>
152
+ </output>
153
+ </layer>
154
+ <layer id="17" name="Constant_116728" type="Const" version="opset1">
155
+ <data element_type="u8" shape="17864" offset="24" size="17864" />
156
+ <output>
157
+ <port id="0" precision="U8">
158
+ <dim>17864</dim>
159
+ </port>
160
+ </output>
161
+ </layer>
162
+ <layer id="18" name="SpecialTokensSplit_116729" type="SpecialTokensSplit" version="extension">
163
+ <input>
164
+ <port id="0" precision="I32">
165
+ <dim>-1</dim>
166
+ </port>
167
+ <port id="1" precision="I32">
168
+ <dim>-1</dim>
169
+ </port>
170
+ <port id="2" precision="I32">
171
+ <dim>-1</dim>
172
+ </port>
173
+ <port id="3" precision="I32">
174
+ <dim>-1</dim>
175
+ </port>
176
+ <port id="4" precision="U8">
177
+ <dim>-1</dim>
178
+ </port>
179
+ <port id="5" precision="U8">
180
+ <dim>17864</dim>
181
+ </port>
182
+ </input>
183
+ <output>
184
+ <port id="6" precision="I32">
185
+ <dim>-1</dim>
186
+ </port>
187
+ <port id="7" precision="I32">
188
+ <dim>-1</dim>
189
+ </port>
190
+ <port id="8" precision="I32">
191
+ <dim>-1</dim>
192
+ </port>
193
+ <port id="9" precision="I32">
194
+ <dim>-1</dim>
195
+ </port>
196
+ <port id="10" precision="U8">
197
+ <dim>-1</dim>
198
+ </port>
199
+ <port id="11" precision="BOOL">
200
+ <dim>-1</dim>
201
+ </port>
202
+ </output>
203
+ </layer>
204
+ <layer id="19" name="Constant_116731" type="Const" version="opset1">
205
+ <data element_type="u8" shape="115" offset="17888" size="115" />
206
+ <output>
207
+ <port id="0" precision="U8">
208
+ <dim>115</dim>
209
+ </port>
210
+ </output>
211
+ </layer>
212
+ <layer id="20" name="RegexSplit_116732" type="RegexSplit" version="extension">
213
+ <data behaviour="isolate" invert="false" max_splits="-1" />
214
+ <input>
215
+ <port id="0" precision="I32">
216
+ <dim>-1</dim>
217
+ </port>
218
+ <port id="1" precision="I32">
219
+ <dim>-1</dim>
220
+ </port>
221
+ <port id="2" precision="I32">
222
+ <dim>-1</dim>
223
+ </port>
224
+ <port id="3" precision="I32">
225
+ <dim>-1</dim>
226
+ </port>
227
+ <port id="4" precision="U8">
228
+ <dim>-1</dim>
229
+ </port>
230
+ <port id="5" precision="BOOL">
231
+ <dim>-1</dim>
232
+ </port>
233
+ <port id="6" precision="U8">
234
+ <dim>115</dim>
235
+ </port>
236
+ </input>
237
+ <output>
238
+ <port id="7" precision="I32">
239
+ <dim>-1</dim>
240
+ </port>
241
+ <port id="8" precision="I32">
242
+ <dim>-1</dim>
243
+ </port>
244
+ <port id="9" precision="I32">
245
+ <dim>-1</dim>
246
+ </port>
247
+ <port id="10" precision="I32">
248
+ <dim>-1</dim>
249
+ </port>
250
+ <port id="11" precision="U8">
251
+ <dim>-1</dim>
252
+ </port>
253
+ <port id="12" precision="BOOL">
254
+ <dim>-1</dim>
255
+ </port>
256
+ </output>
257
+ </layer>
258
+ <layer id="21" name="Constant_116734" type="Const" version="opset1">
259
+ <data element_type="i32" shape="131072" offset="18003" size="524288" />
260
+ <output>
261
+ <port id="0" precision="I32">
262
+ <dim>131072</dim>
263
+ </port>
264
+ </output>
265
+ </layer>
266
+ <layer id="22" name="Constant_116736" type="Const" version="opset1">
267
+ <data element_type="i32" shape="131072" offset="542291" size="524288" />
268
+ <output>
269
+ <port id="0" precision="I32">
270
+ <dim>131072</dim>
271
+ </port>
272
+ </output>
273
+ </layer>
274
+ <layer id="23" name="Constant_116738" type="Const" version="opset1">
275
+ <data element_type="u8" shape="891118" offset="1066579" size="891118" />
276
+ <output>
277
+ <port id="0" precision="U8">
278
+ <dim>891118</dim>
279
+ </port>
280
+ </output>
281
+ </layer>
282
+ <layer id="24" name="Constant_116746" type="Const" version="opset1">
283
+ <data element_type="i32" shape="269443" offset="1957697" size="1077772" />
284
+ <output>
285
+ <port id="0" precision="I32">
286
+ <dim>269443</dim>
287
+ </port>
288
+ </output>
289
+ </layer>
290
+ <layer id="25" name="Constant_116748" type="Const" version="opset1">
291
+ <data element_type="i32" shape="269443" offset="3035469" size="1077772" />
292
+ <output>
293
+ <port id="0" precision="I32">
294
+ <dim>269443</dim>
295
+ </port>
296
+ </output>
297
+ </layer>
298
+ <layer id="26" name="Constant_116750" type="Const" version="opset1">
299
+ <data element_type="u8" shape="989624" offset="4113241" size="989624" />
300
+ <output>
301
+ <port id="0" precision="U8">
302
+ <dim>989624</dim>
303
+ </port>
304
+ </output>
305
+ </layer>
306
+ <layer id="27" name="Constant_116752" type="Const" version="opset1">
307
+ <data element_type="i32" shape="269443" offset="5102865" size="1077772" />
308
+ <output>
309
+ <port id="0" precision="I32">
310
+ <dim>269443</dim>
311
+ </port>
312
+ </output>
313
+ </layer>
314
+ <layer id="28" name="Constant_116754" type="Const" version="opset1">
315
+ <data element_type="i32" shape="269443" offset="6180637" size="1077772" />
316
+ <output>
317
+ <port id="0" precision="I32">
318
+ <dim>269443</dim>
319
+ </port>
320
+ </output>
321
+ </layer>
322
+ <layer id="29" name="Constant_116756" type="Const" version="opset1">
323
+ <data element_type="u8" shape="867745" offset="7258409" size="867745" />
324
+ <output>
325
+ <port id="0" precision="U8">
326
+ <dim>867745</dim>
327
+ </port>
328
+ </output>
329
+ </layer>
330
+ <layer id="30" name="Constant_116740" type="Const" version="opset1">
331
+ <data element_type="i32" shape="999" offset="8126154" size="3996" />
332
+ <output>
333
+ <port id="0" precision="I32">
334
+ <dim>999</dim>
335
+ </port>
336
+ </output>
337
+ </layer>
338
+ <layer id="31" name="Constant_116742" type="Const" version="opset1">
339
+ <data element_type="i32" shape="999" offset="8130150" size="3996" />
340
+ <output>
341
+ <port id="0" precision="I32">
342
+ <dim>999</dim>
343
+ </port>
344
+ </output>
345
+ </layer>
346
+ <layer id="32" name="Constant_116744" type="Const" version="opset1">
347
+ <data element_type="u8" shape="12855" offset="8134146" size="12855" />
348
+ <output>
349
+ <port id="0" precision="U8">
350
+ <dim>12855</dim>
351
+ </port>
352
+ </output>
353
+ </layer>
354
+ <layer id="33" name="Constant_116757" type="Const" version="opset1">
355
+ <data element_type="i32" shape="999" offset="8147001" size="3996" />
356
+ <output>
357
+ <port id="0" precision="I32">
358
+ <dim>999</dim>
359
+ </port>
360
+ </output>
361
+ </layer>
362
+ <layer id="34" name="BPETokenizer_116758" type="BPETokenizer" version="extension">
363
+ <data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="26214" />
364
+ <input>
365
+ <port id="0" precision="I32">
366
+ <dim>-1</dim>
367
+ </port>
368
+ <port id="1" precision="I32">
369
+ <dim>-1</dim>
370
+ </port>
371
+ <port id="2" precision="I32">
372
+ <dim>-1</dim>
373
+ </port>
374
+ <port id="3" precision="I32">
375
+ <dim>-1</dim>
376
+ </port>
377
+ <port id="4" precision="U8">
378
+ <dim>-1</dim>
379
+ </port>
380
+ <port id="5" precision="I32">
381
+ <dim>131072</dim>
382
+ </port>
383
+ <port id="6" precision="I32">
384
+ <dim>131072</dim>
385
+ </port>
386
+ <port id="7" precision="U8">
387
+ <dim>891118</dim>
388
+ </port>
389
+ <port id="8" precision="I32">
390
+ <dim>269443</dim>
391
+ </port>
392
+ <port id="9" precision="I32">
393
+ <dim>269443</dim>
394
+ </port>
395
+ <port id="10" precision="U8">
396
+ <dim>989624</dim>
397
+ </port>
398
+ <port id="11" precision="I32">
399
+ <dim>269443</dim>
400
+ </port>
401
+ <port id="12" precision="I32">
402
+ <dim>269443</dim>
403
+ </port>
404
+ <port id="13" precision="U8">
405
+ <dim>867745</dim>
406
+ </port>
407
+ <port id="14" precision="I32">
408
+ <dim>999</dim>
409
+ </port>
410
+ <port id="15" precision="I32">
411
+ <dim>999</dim>
412
+ </port>
413
+ <port id="16" precision="U8">
414
+ <dim>12855</dim>
415
+ </port>
416
+ <port id="17" precision="I32">
417
+ <dim>999</dim>
418
+ </port>
419
+ </input>
420
+ <output>
421
+ <port id="18" precision="I32">
422
+ <dim>-1</dim>
423
+ </port>
424
+ <port id="19" precision="I32">
425
+ <dim>-1</dim>
426
+ </port>
427
+ <port id="20" precision="I32">
428
+ <dim>-1</dim>
429
+ </port>
430
+ </output>
431
+ </layer>
432
+ <layer id="35" name="Constant_116759" type="Const" version="opset1">
433
+ <data element_type="i32" shape="" offset="8150997" size="4" />
434
+ <output>
435
+ <port id="0" precision="I32" />
436
+ </output>
437
+ </layer>
438
+ <layer id="36" name="Constant_116761" type="Const" version="opset1">
439
+ <data element_type="u8" shape="4" offset="8151001" size="4" />
440
+ <output>
441
+ <port id="0" precision="U8">
442
+ <dim>4</dim>
443
+ </port>
444
+ </output>
445
+ </layer>
446
+ <layer id="37" name="Constant_116763" type="Const" version="opset1">
447
+ <data element_type="u8" shape="13" offset="8151005" size="13" />
448
+ <output>
449
+ <port id="0" precision="U8">
450
+ <dim>13</dim>
451
+ </port>
452
+ </output>
453
+ </layer>
454
+ <layer id="38" name="Truncate_116764" type="Truncate" version="extension">
455
+ <data m_num_inputs="1" />
456
+ <input>
457
+ <port id="0" precision="I32">
458
+ <dim>-1</dim>
459
+ </port>
460
+ <port id="1" precision="I32">
461
+ <dim>-1</dim>
462
+ </port>
463
+ <port id="2" precision="I32">
464
+ <dim>-1</dim>
465
+ </port>
466
+ <port id="3" precision="I32" />
467
+ <port id="4" precision="U8">
468
+ <dim>4</dim>
469
+ </port>
470
+ <port id="5" precision="U8">
471
+ <dim>13</dim>
472
+ </port>
473
+ </input>
474
+ <output>
475
+ <port id="6" precision="I32">
476
+ <dim>-1</dim>
477
+ </port>
478
+ <port id="7" precision="I32">
479
+ <dim>-1</dim>
480
+ </port>
481
+ <port id="8" precision="I32">
482
+ <dim>-1</dim>
483
+ </port>
484
+ </output>
485
+ </layer>
486
+ <layer id="39" name="Constant_116768" type="Const" version="opset1">
487
+ <data element_type="i32" shape="2" offset="8" size="8" />
488
+ <output>
489
+ <port id="0" precision="I32">
490
+ <dim>2</dim>
491
+ </port>
492
+ </output>
493
+ </layer>
494
+ <layer id="40" name="CombineSegments_116769" type="CombineSegments" version="extension">
495
+ <input>
496
+ <port id="0" precision="I32" />
497
+ <port id="1" precision="I32" />
498
+ <port id="2" precision="I32">
499
+ <dim>1</dim>
500
+ </port>
501
+ <port id="3" precision="I32">
502
+ <dim>-1</dim>
503
+ </port>
504
+ <port id="4" precision="I32">
505
+ <dim>-1</dim>
506
+ </port>
507
+ <port id="5" precision="I32">
508
+ <dim>-1</dim>
509
+ </port>
510
+ <port id="6" precision="I32">
511
+ <dim>2</dim>
512
+ </port>
513
+ </input>
514
+ <output>
515
+ <port id="7" precision="I32">
516
+ <dim>-1</dim>
517
+ </port>
518
+ <port id="8" precision="I32">
519
+ <dim>-1</dim>
520
+ </port>
521
+ <port id="9" precision="I32">
522
+ <dim>-1</dim>
523
+ </port>
524
+ <port id="10" precision="I32">
525
+ <dim>-1</dim>
526
+ </port>
527
+ <port id="11" precision="I32">
528
+ <dim>-1</dim>
529
+ </port>
530
+ <port id="12" precision="I32">
531
+ <dim>-1</dim>
532
+ </port>
533
+ </output>
534
+ </layer>
535
+ <layer id="41" name="Subtract_116770" type="Subtract" version="opset1">
536
+ <data auto_broadcast="numpy" />
537
+ <input>
538
+ <port id="0" precision="I32">
539
+ <dim>-1</dim>
540
+ </port>
541
+ <port id="1" precision="I32">
542
+ <dim>-1</dim>
543
+ </port>
544
+ </input>
545
+ <output>
546
+ <port id="2" precision="I32">
547
+ <dim>-1</dim>
548
+ </port>
549
+ </output>
550
+ </layer>
551
+ <layer id="42" name="Constant_116771" type="Const" version="opset1">
552
+ <data element_type="i32" shape="" offset="0" size="4" />
553
+ <output>
554
+ <port id="0" precision="I32" />
555
+ </output>
556
+ </layer>
557
+ <layer id="43" name="ReduceMax_116772" type="ReduceMax" version="opset1">
558
+ <data keep_dims="false" />
559
+ <input>
560
+ <port id="0" precision="I32">
561
+ <dim>-1</dim>
562
+ </port>
563
+ <port id="1" precision="I32" />
564
+ </input>
565
+ <output>
566
+ <port id="2" precision="I32" />
567
+ </output>
568
+ </layer>
569
+ <layer id="44" name="Constant_116773" type="Const" version="opset1">
570
+ <data element_type="i32" shape="" offset="8151018" size="4" />
571
+ <output>
572
+ <port id="0" precision="I32" />
573
+ </output>
574
+ </layer>
575
+ <layer id="45" name="RaggedToDense_116774" type="RaggedToDense" version="extension">
576
+ <data pad_right="false" m_pad_max_length="false" />
577
+ <input>
578
+ <port id="0" precision="I32">
579
+ <dim>-1</dim>
580
+ </port>
581
+ <port id="1" precision="I32">
582
+ <dim>-1</dim>
583
+ </port>
584
+ <port id="2" precision="I32">
585
+ <dim>-1</dim>
586
+ </port>
587
+ <port id="3" precision="I32" />
588
+ <port id="4" precision="I32" />
589
+ </input>
590
+ <output>
591
+ <port id="5" precision="I32">
592
+ <dim>-1</dim>
593
+ <dim>-1</dim>
594
+ </port>
595
+ <port id="6" precision="BOOL">
596
+ <dim>-1</dim>
597
+ <dim>-1</dim>
598
+ </port>
599
+ </output>
600
+ </layer>
601
+ <layer id="46" name="Convert_116775" type="Convert" version="opset1">
602
+ <data destination_type="i32" />
603
+ <input>
604
+ <port id="0" precision="BOOL">
605
+ <dim>-1</dim>
606
+ <dim>-1</dim>
607
+ </port>
608
+ </input>
609
+ <output>
610
+ <port id="1" precision="I32">
611
+ <dim>-1</dim>
612
+ <dim>-1</dim>
613
+ </port>
614
+ </output>
615
+ </layer>
616
+ <layer id="47" name="Convert_116775.0" type="Convert" version="opset1">
617
+ <data destination_type="i64" />
618
+ <input>
619
+ <port id="0" precision="I32">
620
+ <dim>-1</dim>
621
+ <dim>-1</dim>
622
+ </port>
623
+ </input>
624
+ <output>
625
+ <port id="1" precision="I64" names="attention_mask">
626
+ <dim>-1</dim>
627
+ <dim>-1</dim>
628
+ </port>
629
+ </output>
630
+ </layer>
631
+ <layer id="49" name="RaggedToDense_116774.0" type="Convert" version="opset1">
632
+ <data destination_type="i64" />
633
+ <input>
634
+ <port id="0" precision="I32">
635
+ <dim>-1</dim>
636
+ <dim>-1</dim>
637
+ </port>
638
+ </input>
639
+ <output>
640
+ <port id="1" precision="I64" names="input_ids">
641
+ <dim>-1</dim>
642
+ <dim>-1</dim>
643
+ </port>
644
+ </output>
645
+ </layer>
646
+ <layer id="50" name="Result_116778" type="Result" version="opset1" output_names="input_ids">
647
+ <input>
648
+ <port id="0" precision="I64">
649
+ <dim>-1</dim>
650
+ <dim>-1</dim>
651
+ </port>
652
+ </input>
653
+ </layer>
654
+ <layer id="48" name="Result_116780" type="Result" version="opset1" output_names="attention_mask">
655
+ <input>
656
+ <port id="0" precision="I64">
657
+ <dim>-1</dim>
658
+ <dim>-1</dim>
659
+ </port>
660
+ </input>
661
+ </layer>
662
+ </layers>
663
+ <edges>
664
+ <edge from-layer="0" from-port="0" to-layer="5" to-port="0" />
665
+ <edge from-layer="1" from-port="0" to-layer="40" to-port="0" />
666
+ <edge from-layer="2" from-port="0" to-layer="40" to-port="1" />
667
+ <edge from-layer="3" from-port="0" to-layer="40" to-port="2" />
668
+ <edge from-layer="4" from-port="0" to-layer="11" to-port="0" />
669
+ <edge from-layer="5" from-port="1" to-layer="6" to-port="0" />
670
+ <edge from-layer="5" from-port="3" to-layer="18" to-port="4" />
671
+ <edge from-layer="5" from-port="2" to-layer="18" to-port="3" />
672
+ <edge from-layer="5" from-port="1" to-layer="18" to-port="2" />
673
+ <edge from-layer="6" from-port="1" to-layer="9" to-port="0" />
674
+ <edge from-layer="7" from-port="0" to-layer="9" to-port="1" />
675
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="2" />
676
+ <edge from-layer="9" from-port="3" to-layer="14" to-port="0" />
677
+ <edge from-layer="9" from-port="3" to-layer="11" to-port="1" />
678
+ <edge from-layer="10" from-port="0" to-layer="11" to-port="2" />
679
+ <edge from-layer="11" from-port="3" to-layer="18" to-port="0" />
680
+ <edge from-layer="12" from-port="0" to-layer="16" to-port="0" />
681
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
682
+ <edge from-layer="14" from-port="2" to-layer="16" to-port="1" />
683
+ <edge from-layer="15" from-port="0" to-layer="16" to-port="2" />
684
+ <edge from-layer="16" from-port="3" to-layer="18" to-port="1" />
685
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="5" />
686
+ <edge from-layer="18" from-port="11" to-layer="20" to-port="5" />
687
+ <edge from-layer="18" from-port="10" to-layer="20" to-port="4" />
688
+ <edge from-layer="18" from-port="9" to-layer="20" to-port="3" />
689
+ <edge from-layer="18" from-port="8" to-layer="20" to-port="2" />
690
+ <edge from-layer="18" from-port="7" to-layer="20" to-port="1" />
691
+ <edge from-layer="18" from-port="6" to-layer="20" to-port="0" />
692
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="6" />
693
+ <edge from-layer="20" from-port="7" to-layer="34" to-port="0" />
694
+ <edge from-layer="20" from-port="8" to-layer="34" to-port="1" />
695
+ <edge from-layer="20" from-port="9" to-layer="34" to-port="2" />
696
+ <edge from-layer="20" from-port="10" to-layer="34" to-port="3" />
697
+ <edge from-layer="20" from-port="11" to-layer="34" to-port="4" />
698
+ <edge from-layer="21" from-port="0" to-layer="34" to-port="5" />
699
+ <edge from-layer="22" from-port="0" to-layer="34" to-port="6" />
700
+ <edge from-layer="23" from-port="0" to-layer="34" to-port="7" />
701
+ <edge from-layer="24" from-port="0" to-layer="34" to-port="8" />
702
+ <edge from-layer="25" from-port="0" to-layer="34" to-port="9" />
703
+ <edge from-layer="26" from-port="0" to-layer="34" to-port="10" />
704
+ <edge from-layer="27" from-port="0" to-layer="34" to-port="11" />
705
+ <edge from-layer="28" from-port="0" to-layer="34" to-port="12" />
706
+ <edge from-layer="29" from-port="0" to-layer="34" to-port="13" />
707
+ <edge from-layer="30" from-port="0" to-layer="34" to-port="14" />
708
+ <edge from-layer="31" from-port="0" to-layer="34" to-port="15" />
709
+ <edge from-layer="32" from-port="0" to-layer="34" to-port="16" />
710
+ <edge from-layer="33" from-port="0" to-layer="34" to-port="17" />
711
+ <edge from-layer="34" from-port="18" to-layer="38" to-port="0" />
712
+ <edge from-layer="34" from-port="19" to-layer="38" to-port="1" />
713
+ <edge from-layer="34" from-port="20" to-layer="38" to-port="2" />
714
+ <edge from-layer="35" from-port="0" to-layer="38" to-port="3" />
715
+ <edge from-layer="36" from-port="0" to-layer="38" to-port="4" />
716
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="5" />
717
+ <edge from-layer="38" from-port="8" to-layer="40" to-port="5" />
718
+ <edge from-layer="38" from-port="6" to-layer="40" to-port="3" />
719
+ <edge from-layer="38" from-port="7" to-layer="40" to-port="4" />
720
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="6" />
721
+ <edge from-layer="40" from-port="8" to-layer="45" to-port="1" />
722
+ <edge from-layer="40" from-port="9" to-layer="45" to-port="2" />
723
+ <edge from-layer="40" from-port="7" to-layer="45" to-port="0" />
724
+ <edge from-layer="40" from-port="7" to-layer="41" to-port="1" />
725
+ <edge from-layer="40" from-port="8" to-layer="41" to-port="0" />
726
+ <edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
727
+ <edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
728
+ <edge from-layer="43" from-port="2" to-layer="45" to-port="3" />
729
+ <edge from-layer="44" from-port="0" to-layer="45" to-port="4" />
730
+ <edge from-layer="45" from-port="6" to-layer="46" to-port="0" />
731
+ <edge from-layer="45" from-port="5" to-layer="49" to-port="0" />
732
+ <edge from-layer="46" from-port="1" to-layer="47" to-port="0" />
733
+ <edge from-layer="47" from-port="1" to-layer="48" to-port="0" />
734
+ <edge from-layer="49" from-port="1" to-layer="50" to-port="0" />
735
+ </edges>
736
+ <rt_info>
737
+ <add_attention_mask value="True" />
738
+ <add_prefix_space />
739
+ <add_special_tokens value="True" />
740
+ <bos_token_id value="1" />
741
+ <chat_template value="{%- set today = strftime_now(&quot;%Y-%m-%d&quot;) %}&#10;{%- set default_system_message = &quot;You are Mistral Small 3, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\nYour knowledge base was last updated on 2023-10-01. The current date is &quot; + today + &quot;.\n\nWhen you're not sure about some information, you say that you don't have the information and don't make up anything.\nIf the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \&quot;What are some good restaurants around me?\&quot; => \&quot;Where are you?\&quot; or \&quot;When is the next flight to Tokyo\&quot; => \&quot;Where do you travel from?\&quot;)&quot; %}&#10;&#10;{{- bos_token }}&#10;&#10;{%- if messages[0]['role'] == 'system' %}&#10; {%- if messages[0]['content'] is string %}&#10; {%- set system_message = messages[0]['content'] %}&#10; {%- else %}&#10; {%- set system_message = messages[0]['content'][0]['text'] %}&#10; {%- endif %}&#10; {%- set loop_messages = messages[1:] %}&#10;{%- else %}&#10; {%- set system_message = default_system_message %}&#10; {%- set loop_messages = messages %}&#10;{%- endif %}&#10;{{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}&#10;&#10;{%- for message in loop_messages %}&#10; {%- if message['role'] == 'user' %}&#10; {%- if message['content'] is string %}&#10; {{- '[INST]' + message['content'] + '[/INST]' }}&#10; {%- else %}&#10; {{- '[INST]' }}&#10; {%- for block in message['content'] %}&#10; {%- if block['type'] == 'text' %}&#10; {{- block['text'] }}&#10; {%- elif block['type'] in ['image', 'image_url'] %}&#10; {{- '[IMG]' }}&#10; {%- else %}&#10; {{- raise_exception('Only text and image blocks are supported in message content!') }}&#10; {%- endif %}&#10; {%- endfor %}&#10; {{- '[/INST]' }}&#10; {%- endif %}&#10; {%- elif message['role'] == 'system' %}&#10; {%- if message['content'] is string %}&#10; {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}&#10; {%- else %}&#10; {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}&#10; {%- endif %}&#10; {%- elif message['role'] == 'assistant' %}&#10; {%- if message['content'] is string %}&#10; {{- message['content'] + eos_token }}&#10; {%- else %}&#10; {{- message['content'][0]['text'] + eos_token }}&#10; {%- endif %}&#10; {%- else %}&#10; {{- raise_exception('Only user, system and assistant roles are supported!') }}&#10; {%- endif %}&#10;{%- endfor %}" />
742
+ <clean_up_tokenization_spaces />
743
+ <detokenizer_input_type value="i64" />
744
+ <eos_token_id value="2" />
745
+ <handle_special_tokens_with_re />
746
+ <max_length />
747
+ <number_of_inputs value="1" />
748
+ <openvino_tokenizers_version value="2025.3.0.0-598-57f278c8468" />
749
+ <openvino_version value="2025.3.0-19807-44526285f24-releases/2025/3" />
750
+ <original_post_processor_template value="{&quot;type&quot;: &quot;TemplateProcessing&quot;, &quot;single&quot;: [{&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 0}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;A&quot;, &quot;type_id&quot;: 0}}], &quot;pair&quot;: [{&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 0}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;A&quot;, &quot;type_id&quot;: 0}}, {&quot;SpecialToken&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;type_id&quot;: 1}}, {&quot;Sequence&quot;: {&quot;id&quot;: &quot;B&quot;, &quot;type_id&quot;: 1}}], &quot;special_tokens&quot;: {&quot;&lt;s>&quot;: {&quot;id&quot;: &quot;&lt;s>&quot;, &quot;ids&quot;: [1], &quot;tokens&quot;: [&quot;&lt;s>&quot;]}}}" />
751
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
752
+ <pad_token_id value="11" />
753
+ <processed_post_processor_template value="{&quot;single&quot;: {&quot;ids&quot;: [1, -1], &quot;type_ids&quot;: [0, 0]}, &quot;pair&quot;: {&quot;ids&quot;: [1, -1, 1, -2], &quot;type_ids&quot;: [0, 0, 1, 1]}}" />
754
+ <skip_special_tokens value="True" />
755
+ <streaming_detokenizer value="False" />
756
+ <tokenizer_output_type value="i64" />
757
+ <tokenizers_version value="0.21.2" />
758
+ <transformers_version value="4.52.4" />
759
+ <use_max_padding value="False" />
760
+ <use_sentencepiece_backend value="False" />
761
+ <utf8_replace_mode value="replace" />
762
+ <with_detokenizer value="True" />
763
+ </rt_info>
764
+ </net>
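
The openvino_tokenizer.* and openvino_detokenizer.* pairs in this commit are standalone OpenVINO graphs produced by openvino-tokenizers (note the extension ops such as BPETokenizer and VocabDecoder above); openvino_genai's LLMPipeline picks them up automatically alongside openvino_model.xml. A minimal sketch of using the graphs directly, assuming the openvino-tokenizers package is installed so that importing it registers those custom operations:

import openvino as ov
import openvino_tokenizers  # noqa: F401  # registers the tokenizer extension ops

core = ov.Core()
tokenize = core.compile_model("openvino_tokenizer.xml", "CPU")      # strings -> input_ids, attention_mask
detokenize = core.compile_model("openvino_detokenizer.xml", "CPU")  # token ids -> strings

encoded = tokenize(["Name the capital of France."])
print(encoded["input_ids"])
print(detokenize(encoded["input_ids"])["string_output"])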
special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>",
6
+ "[INST]",
7
+ "[/INST]",
8
+ "[AVAILABLE_TOOLS]",
9
+ "[/AVAILABLE_TOOLS]",
10
+ "[TOOL_RESULTS]",
11
+ "[/TOOL_RESULTS]",
12
+ "[TOOL_CALLS]",
13
+ "[IMG]",
14
+ "<pad>",
15
+ "[IMG_BREAK]",
16
+ "[IMG_END]",
17
+ "[PREFIX]",
18
+ "[MIDDLE]",
19
+ "[SUFFIX]",
20
+ "[SYSTEM_PROMPT]",
21
+ "[/SYSTEM_PROMPT]",
22
+ "[TOOL_CONTENT]",
23
+ "<SPECIAL_20>",
24
+ "<SPECIAL_21>",
25
+ "<SPECIAL_22>",
26
+ "<SPECIAL_23>",
27
+ "<SPECIAL_24>",
28
+ "<SPECIAL_25>",
29
+ "<SPECIAL_26>",
30
+ "<SPECIAL_27>",
31
+ "<SPECIAL_28>",
32
+ "<SPECIAL_29>",
33
+ "<SPECIAL_30>",
34
+ "<SPECIAL_31>",
35
+ "<SPECIAL_32>",
36
+ "<SPECIAL_33>",
37
+ "<SPECIAL_34>",
38
+ "<SPECIAL_35>",
39
+ "<SPECIAL_36>",
40
+ "<SPECIAL_37>",
41
+ "<SPECIAL_38>",
42
+ "<SPECIAL_39>",
43
+ "<SPECIAL_40>",
44
+ "<SPECIAL_41>",
45
+ "<SPECIAL_42>",
46
+ "<SPECIAL_43>",
47
+ "<SPECIAL_44>",
48
+ "<SPECIAL_45>",
49
+ "<SPECIAL_46>",
50
+ "<SPECIAL_47>",
51
+ "<SPECIAL_48>",
52
+ "<SPECIAL_49>",
53
+ "<SPECIAL_50>",
54
+ "<SPECIAL_51>",
55
+ "<SPECIAL_52>",
56
+ "<SPECIAL_53>",
57
+ "<SPECIAL_54>",
58
+ "<SPECIAL_55>",
59
+ "<SPECIAL_56>",
60
+ "<SPECIAL_57>",
61
+ "<SPECIAL_58>",
62
+ "<SPECIAL_59>",
63
+ "<SPECIAL_60>",
64
+ "<SPECIAL_61>",
65
+ "<SPECIAL_62>",
66
+ "<SPECIAL_63>",
67
+ "<SPECIAL_64>",
68
+ "<SPECIAL_65>",
69
+ "<SPECIAL_66>",
70
+ "<SPECIAL_67>",
71
+ "<SPECIAL_68>",
72
+ "<SPECIAL_69>",
73
+ "<SPECIAL_70>",
74
+ "<SPECIAL_71>",
75
+ "<SPECIAL_72>",
76
+ "<SPECIAL_73>",
77
+ "<SPECIAL_74>",
78
+ "<SPECIAL_75>",
79
+ "<SPECIAL_76>",
80
+ "<SPECIAL_77>",
81
+ "<SPECIAL_78>",
82
+ "<SPECIAL_79>",
83
+ "<SPECIAL_80>",
84
+ "<SPECIAL_81>",
85
+ "<SPECIAL_82>",
86
+ "<SPECIAL_83>",
87
+ "<SPECIAL_84>",
88
+ "<SPECIAL_85>",
89
+ "<SPECIAL_86>",
90
+ "<SPECIAL_87>",
91
+ "<SPECIAL_88>",
92
+ "<SPECIAL_89>",
93
+ "<SPECIAL_90>",
94
+ "<SPECIAL_91>",
95
+ "<SPECIAL_92>",
96
+ "<SPECIAL_93>",
97
+ "<SPECIAL_94>",
98
+ "<SPECIAL_95>",
99
+ "<SPECIAL_96>",
100
+ "<SPECIAL_97>",
101
+ "<SPECIAL_98>",
102
+ "<SPECIAL_99>",
103
+ "<SPECIAL_100>",
104
+ "<SPECIAL_101>",
105
+ "<SPECIAL_102>",
106
+ "<SPECIAL_103>",
107
+ "<SPECIAL_104>",
108
+ "<SPECIAL_105>",
109
+ "<SPECIAL_106>",
110
+ "<SPECIAL_107>",
111
+ "<SPECIAL_108>",
112
+ "<SPECIAL_109>",
113
+ "<SPECIAL_110>",
114
+ "<SPECIAL_111>",
115
+ "<SPECIAL_112>",
116
+ "<SPECIAL_113>",
117
+ "<SPECIAL_114>",
118
+ "<SPECIAL_115>",
119
+ "<SPECIAL_116>",
120
+ "<SPECIAL_117>",
121
+ "<SPECIAL_118>",
122
+ "<SPECIAL_119>",
123
+ "<SPECIAL_120>",
124
+ "<SPECIAL_121>",
125
+ "<SPECIAL_122>",
126
+ "<SPECIAL_123>",
127
+ "<SPECIAL_124>",
128
+ "<SPECIAL_125>",
129
+ "<SPECIAL_126>",
130
+ "<SPECIAL_127>",
131
+ "<SPECIAL_128>",
132
+ "<SPECIAL_129>",
133
+ "<SPECIAL_130>",
134
+ "<SPECIAL_131>",
135
+ "<SPECIAL_132>",
136
+ "<SPECIAL_133>",
137
+ "<SPECIAL_134>",
138
+ "<SPECIAL_135>",
139
+ "<SPECIAL_136>",
140
+ "<SPECIAL_137>",
141
+ "<SPECIAL_138>",
142
+ "<SPECIAL_139>",
143
+ "<SPECIAL_140>",
144
+ "<SPECIAL_141>",
145
+ "<SPECIAL_142>",
146
+ "<SPECIAL_143>",
147
+ "<SPECIAL_144>",
148
+ "<SPECIAL_145>",
149
+ "<SPECIAL_146>",
150
+ "<SPECIAL_147>",
151
+ "<SPECIAL_148>",
152
+ "<SPECIAL_149>",
153
+ "<SPECIAL_150>",
154
+ "<SPECIAL_151>",
155
+ "<SPECIAL_152>",
156
+ "<SPECIAL_153>",
157
+ "<SPECIAL_154>",
158
+ "<SPECIAL_155>",
159
+ "<SPECIAL_156>",
160
+ "<SPECIAL_157>",
161
+ "<SPECIAL_158>",
162
+ "<SPECIAL_159>",
163
+ "<SPECIAL_160>",
164
+ "<SPECIAL_161>",
165
+ "<SPECIAL_162>",
166
+ "<SPECIAL_163>",
167
+ "<SPECIAL_164>",
168
+ "<SPECIAL_165>",
169
+ "<SPECIAL_166>",
170
+ "<SPECIAL_167>",
171
+ "<SPECIAL_168>",
172
+ "<SPECIAL_169>",
173
+ "<SPECIAL_170>",
174
+ "<SPECIAL_171>",
175
+ "<SPECIAL_172>",
176
+ "<SPECIAL_173>",
177
+ "<SPECIAL_174>",
178
+ "<SPECIAL_175>",
179
+ "<SPECIAL_176>",
180
+ "<SPECIAL_177>",
181
+ "<SPECIAL_178>",
182
+ "<SPECIAL_179>",
183
+ "<SPECIAL_180>",
184
+ "<SPECIAL_181>",
185
+ "<SPECIAL_182>",
186
+ "<SPECIAL_183>",
187
+ "<SPECIAL_184>",
188
+ "<SPECIAL_185>",
189
+ "<SPECIAL_186>",
190
+ "<SPECIAL_187>",
191
+ "<SPECIAL_188>",
192
+ "<SPECIAL_189>",
193
+ "<SPECIAL_190>",
194
+ "<SPECIAL_191>",
195
+ "<SPECIAL_192>",
196
+ "<SPECIAL_193>",
197
+ "<SPECIAL_194>",
198
+ "<SPECIAL_195>",
199
+ "<SPECIAL_196>",
200
+ "<SPECIAL_197>",
201
+ "<SPECIAL_198>",
202
+ "<SPECIAL_199>",
203
+ "<SPECIAL_200>",
204
+ "<SPECIAL_201>",
205
+ "<SPECIAL_202>",
206
+ "<SPECIAL_203>",
207
+ "<SPECIAL_204>",
208
+ "<SPECIAL_205>",
209
+ "<SPECIAL_206>",
210
+ "<SPECIAL_207>",
211
+ "<SPECIAL_208>",
212
+ "<SPECIAL_209>",
213
+ "<SPECIAL_210>",
214
+ "<SPECIAL_211>",
215
+ "<SPECIAL_212>",
216
+ "<SPECIAL_213>",
217
+ "<SPECIAL_214>",
218
+ "<SPECIAL_215>",
219
+ "<SPECIAL_216>",
220
+ "<SPECIAL_217>",
221
+ "<SPECIAL_218>",
222
+ "<SPECIAL_219>",
223
+ "<SPECIAL_220>",
224
+ "<SPECIAL_221>",
225
+ "<SPECIAL_222>",
226
+ "<SPECIAL_223>",
227
+ "<SPECIAL_224>",
228
+ "<SPECIAL_225>",
229
+ "<SPECIAL_226>",
230
+ "<SPECIAL_227>",
231
+ "<SPECIAL_228>",
232
+ "<SPECIAL_229>",
233
+ "<SPECIAL_230>",
234
+ "<SPECIAL_231>",
235
+ "<SPECIAL_232>",
236
+ "<SPECIAL_233>",
237
+ "<SPECIAL_234>",
238
+ "<SPECIAL_235>",
239
+ "<SPECIAL_236>",
240
+ "<SPECIAL_237>",
241
+ "<SPECIAL_238>",
242
+ "<SPECIAL_239>",
243
+ "<SPECIAL_240>",
244
+ "<SPECIAL_241>",
245
+ "<SPECIAL_242>",
246
+ "<SPECIAL_243>",
247
+ "<SPECIAL_244>",
248
+ "<SPECIAL_245>",
249
+ "<SPECIAL_246>",
250
+ "<SPECIAL_247>",
251
+ "<SPECIAL_248>",
252
+ "<SPECIAL_249>",
253
+ "<SPECIAL_250>",
254
+ "<SPECIAL_251>",
255
+ "<SPECIAL_252>",
256
+ "<SPECIAL_253>",
257
+ "<SPECIAL_254>",
258
+ "<SPECIAL_255>",
259
+ "<SPECIAL_256>",
260
+ "<SPECIAL_257>",
261
+ "<SPECIAL_258>",
262
+ "<SPECIAL_259>",
263
+ "<SPECIAL_260>",
264
+ "<SPECIAL_261>",
265
+ "<SPECIAL_262>",
266
+ "<SPECIAL_263>",
267
+ "<SPECIAL_264>",
268
+ "<SPECIAL_265>",
269
+ "<SPECIAL_266>",
270
+ "<SPECIAL_267>",
271
+ "<SPECIAL_268>",
272
+ "<SPECIAL_269>",
273
+ "<SPECIAL_270>",
274
+ "<SPECIAL_271>",
275
+ "<SPECIAL_272>",
276
+ "<SPECIAL_273>",
277
+ "<SPECIAL_274>",
278
+ "<SPECIAL_275>",
279
+ "<SPECIAL_276>",
280
+ "<SPECIAL_277>",
281
+ "<SPECIAL_278>",
282
+ "<SPECIAL_279>",
283
+ "<SPECIAL_280>",
284
+ "<SPECIAL_281>",
285
+ "<SPECIAL_282>",
286
+ "<SPECIAL_283>",
287
+ "<SPECIAL_284>",
288
+ "<SPECIAL_285>",
289
+ "<SPECIAL_286>",
290
+ "<SPECIAL_287>",
291
+ "<SPECIAL_288>",
292
+ "<SPECIAL_289>",
293
+ "<SPECIAL_290>",
294
+ "<SPECIAL_291>",
295
+ "<SPECIAL_292>",
296
+ "<SPECIAL_293>",
297
+ "<SPECIAL_294>",
298
+ "<SPECIAL_295>",
299
+ "<SPECIAL_296>",
300
+ "<SPECIAL_297>",
301
+ "<SPECIAL_298>",
302
+ "<SPECIAL_299>",
303
+ "<SPECIAL_300>",
304
+ "<SPECIAL_301>",
305
+ "<SPECIAL_302>",
306
+ "<SPECIAL_303>",
307
+ "<SPECIAL_304>",
308
+ "<SPECIAL_305>",
309
+ "<SPECIAL_306>",
310
+ "<SPECIAL_307>",
311
+ "<SPECIAL_308>",
312
+ "<SPECIAL_309>",
313
+ "<SPECIAL_310>",
314
+ "<SPECIAL_311>",
315
+ "<SPECIAL_312>",
316
+ "<SPECIAL_313>",
317
+ "<SPECIAL_314>",
318
+ "<SPECIAL_315>",
319
+ "<SPECIAL_316>",
320
+ "<SPECIAL_317>",
321
+ "<SPECIAL_318>",
322
+ "<SPECIAL_319>",
323
+ "<SPECIAL_320>",
324
+ "<SPECIAL_321>",
325
+ "<SPECIAL_322>",
326
+ "<SPECIAL_323>",
327
+ "<SPECIAL_324>",
328
+ "<SPECIAL_325>",
329
+ "<SPECIAL_326>",
330
+ "<SPECIAL_327>",
331
+ "<SPECIAL_328>",
332
+ "<SPECIAL_329>",
333
+ "<SPECIAL_330>",
334
+ "<SPECIAL_331>",
335
+ "<SPECIAL_332>",
336
+ "<SPECIAL_333>",
337
+ "<SPECIAL_334>",
338
+ "<SPECIAL_335>",
339
+ "<SPECIAL_336>",
340
+ "<SPECIAL_337>",
341
+ "<SPECIAL_338>",
342
+ "<SPECIAL_339>",
343
+ "<SPECIAL_340>",
344
+ "<SPECIAL_341>",
345
+ "<SPECIAL_342>",
346
+ "<SPECIAL_343>",
347
+ "<SPECIAL_344>",
348
+ "<SPECIAL_345>",
349
+ "<SPECIAL_346>",
350
+ "<SPECIAL_347>",
351
+ "<SPECIAL_348>",
352
+ "<SPECIAL_349>",
353
+ "<SPECIAL_350>",
354
+ "<SPECIAL_351>",
355
+ "<SPECIAL_352>",
356
+ "<SPECIAL_353>",
357
+ "<SPECIAL_354>",
358
+ "<SPECIAL_355>",
359
+ "<SPECIAL_356>",
360
+ "<SPECIAL_357>",
361
+ "<SPECIAL_358>",
362
+ "<SPECIAL_359>",
363
+ "<SPECIAL_360>",
364
+ "<SPECIAL_361>",
365
+ "<SPECIAL_362>",
366
+ "<SPECIAL_363>",
367
+ "<SPECIAL_364>",
368
+ "<SPECIAL_365>",
369
+ "<SPECIAL_366>",
370
+ "<SPECIAL_367>",
371
+ "<SPECIAL_368>",
372
+ "<SPECIAL_369>",
373
+ "<SPECIAL_370>",
374
+ "<SPECIAL_371>",
375
+ "<SPECIAL_372>",
376
+ "<SPECIAL_373>",
377
+ "<SPECIAL_374>",
378
+ "<SPECIAL_375>",
379
+ "<SPECIAL_376>",
380
+ "<SPECIAL_377>",
381
+ "<SPECIAL_378>",
382
+ "<SPECIAL_379>",
383
+ "<SPECIAL_380>",
384
+ "<SPECIAL_381>",
385
+ "<SPECIAL_382>",
386
+ "<SPECIAL_383>",
387
+ "<SPECIAL_384>",
388
+ "<SPECIAL_385>",
389
+ "<SPECIAL_386>",
390
+ "<SPECIAL_387>",
391
+ "<SPECIAL_388>",
392
+ "<SPECIAL_389>",
393
+ "<SPECIAL_390>",
394
+ "<SPECIAL_391>",
395
+ "<SPECIAL_392>",
396
+ "<SPECIAL_393>",
397
+ "<SPECIAL_394>",
398
+ "<SPECIAL_395>",
399
+ "<SPECIAL_396>",
400
+ "<SPECIAL_397>",
401
+ "<SPECIAL_398>",
402
+ "<SPECIAL_399>",
403
+ "<SPECIAL_400>",
404
+ "<SPECIAL_401>",
405
+ "<SPECIAL_402>",
406
+ "<SPECIAL_403>",
407
+ "<SPECIAL_404>",
408
+ "<SPECIAL_405>",
409
+ "<SPECIAL_406>",
410
+ "<SPECIAL_407>",
411
+ "<SPECIAL_408>",
412
+ "<SPECIAL_409>",
413
+ "<SPECIAL_410>",
414
+ "<SPECIAL_411>",
415
+ "<SPECIAL_412>",
416
+ "<SPECIAL_413>",
417
+ "<SPECIAL_414>",
418
+ "<SPECIAL_415>",
419
+ "<SPECIAL_416>",
420
+ "<SPECIAL_417>",
421
+ "<SPECIAL_418>",
422
+ "<SPECIAL_419>",
423
+ "<SPECIAL_420>",
424
+ "<SPECIAL_421>",
425
+ "<SPECIAL_422>",
426
+ "<SPECIAL_423>",
427
+ "<SPECIAL_424>",
428
+ "<SPECIAL_425>",
429
+ "<SPECIAL_426>",
430
+ "<SPECIAL_427>",
431
+ "<SPECIAL_428>",
432
+ "<SPECIAL_429>",
433
+ "<SPECIAL_430>",
434
+ "<SPECIAL_431>",
435
+ "<SPECIAL_432>",
436
+ "<SPECIAL_433>",
437
+ "<SPECIAL_434>",
438
+ "<SPECIAL_435>",
439
+ "<SPECIAL_436>",
440
+ "<SPECIAL_437>",
441
+ "<SPECIAL_438>",
442
+ "<SPECIAL_439>",
443
+ "<SPECIAL_440>",
444
+ "<SPECIAL_441>",
445
+ "<SPECIAL_442>",
446
+ "<SPECIAL_443>",
447
+ "<SPECIAL_444>",
448
+ "<SPECIAL_445>",
449
+ "<SPECIAL_446>",
450
+ "<SPECIAL_447>",
451
+ "<SPECIAL_448>",
452
+ "<SPECIAL_449>",
453
+ "<SPECIAL_450>",
454
+ "<SPECIAL_451>",
455
+ "<SPECIAL_452>",
456
+ "<SPECIAL_453>",
457
+ "<SPECIAL_454>",
458
+ "<SPECIAL_455>",
459
+ "<SPECIAL_456>",
460
+ "<SPECIAL_457>",
461
+ "<SPECIAL_458>",
462
+ "<SPECIAL_459>",
463
+ "<SPECIAL_460>",
464
+ "<SPECIAL_461>",
465
+ "<SPECIAL_462>",
466
+ "<SPECIAL_463>",
467
+ "<SPECIAL_464>",
468
+ "<SPECIAL_465>",
469
+ "<SPECIAL_466>",
470
+ "<SPECIAL_467>",
471
+ "<SPECIAL_468>",
472
+ "<SPECIAL_469>",
473
+ "<SPECIAL_470>",
474
+ "<SPECIAL_471>",
475
+ "<SPECIAL_472>",
476
+ "<SPECIAL_473>",
477
+ "<SPECIAL_474>",
478
+ "<SPECIAL_475>",
479
+ "<SPECIAL_476>",
480
+ "<SPECIAL_477>",
481
+ "<SPECIAL_478>",
482
+ "<SPECIAL_479>",
483
+ "<SPECIAL_480>",
484
+ "<SPECIAL_481>",
485
+ "<SPECIAL_482>",
486
+ "<SPECIAL_483>",
487
+ "<SPECIAL_484>",
488
+ "<SPECIAL_485>",
489
+ "<SPECIAL_486>",
490
+ "<SPECIAL_487>",
491
+ "<SPECIAL_488>",
492
+ "<SPECIAL_489>",
493
+ "<SPECIAL_490>",
494
+ "<SPECIAL_491>",
495
+ "<SPECIAL_492>",
496
+ "<SPECIAL_493>",
497
+ "<SPECIAL_494>",
498
+ "<SPECIAL_495>",
499
+ "<SPECIAL_496>",
500
+ "<SPECIAL_497>",
501
+ "<SPECIAL_498>",
502
+ "<SPECIAL_499>",
503
+ "<SPECIAL_500>",
504
+ "<SPECIAL_501>",
505
+ "<SPECIAL_502>",
506
+ "<SPECIAL_503>",
507
+ "<SPECIAL_504>",
508
+ "<SPECIAL_505>",
509
+ "<SPECIAL_506>",
510
+ "<SPECIAL_507>",
511
+ "<SPECIAL_508>",
512
+ "<SPECIAL_509>",
513
+ "<SPECIAL_510>",
514
+ "<SPECIAL_511>",
515
+ "<SPECIAL_512>",
516
+ "<SPECIAL_513>",
517
+ "<SPECIAL_514>",
518
+ "<SPECIAL_515>",
519
+ "<SPECIAL_516>",
520
+ "<SPECIAL_517>",
521
+ "<SPECIAL_518>",
522
+ "<SPECIAL_519>",
523
+ "<SPECIAL_520>",
524
+ "<SPECIAL_521>",
525
+ "<SPECIAL_522>",
526
+ "<SPECIAL_523>",
527
+ "<SPECIAL_524>",
528
+ "<SPECIAL_525>",
529
+ "<SPECIAL_526>",
530
+ "<SPECIAL_527>",
531
+ "<SPECIAL_528>",
532
+ "<SPECIAL_529>",
533
+ "<SPECIAL_530>",
534
+ "<SPECIAL_531>",
535
+ "<SPECIAL_532>",
536
+ "<SPECIAL_533>",
537
+ "<SPECIAL_534>",
538
+ "<SPECIAL_535>",
539
+ "<SPECIAL_536>",
540
+ "<SPECIAL_537>",
541
+ "<SPECIAL_538>",
542
+ "<SPECIAL_539>",
543
+ "<SPECIAL_540>",
544
+ "<SPECIAL_541>",
545
+ "<SPECIAL_542>",
546
+ "<SPECIAL_543>",
547
+ "<SPECIAL_544>",
548
+ "<SPECIAL_545>",
549
+ "<SPECIAL_546>",
550
+ "<SPECIAL_547>",
551
+ "<SPECIAL_548>",
552
+ "<SPECIAL_549>",
553
+ "<SPECIAL_550>",
554
+ "<SPECIAL_551>",
555
+ "<SPECIAL_552>",
556
+ "<SPECIAL_553>",
557
+ "<SPECIAL_554>",
558
+ "<SPECIAL_555>",
559
+ "<SPECIAL_556>",
560
+ "<SPECIAL_557>",
561
+ "<SPECIAL_558>",
562
+ "<SPECIAL_559>",
563
+ "<SPECIAL_560>",
564
+ "<SPECIAL_561>",
565
+ "<SPECIAL_562>",
566
+ "<SPECIAL_563>",
567
+ "<SPECIAL_564>",
568
+ "<SPECIAL_565>",
569
+ "<SPECIAL_566>",
570
+ "<SPECIAL_567>",
571
+ "<SPECIAL_568>",
572
+ "<SPECIAL_569>",
573
+ "<SPECIAL_570>",
574
+ "<SPECIAL_571>",
575
+ "<SPECIAL_572>",
576
+ "<SPECIAL_573>",
577
+ "<SPECIAL_574>",
578
+ "<SPECIAL_575>",
579
+ "<SPECIAL_576>",
580
+ "<SPECIAL_577>",
581
+ "<SPECIAL_578>",
582
+ "<SPECIAL_579>",
583
+ "<SPECIAL_580>",
584
+ "<SPECIAL_581>",
585
+ "<SPECIAL_582>",
586
+ "<SPECIAL_583>",
587
+ "<SPECIAL_584>",
588
+ "<SPECIAL_585>",
589
+ "<SPECIAL_586>",
590
+ "<SPECIAL_587>",
591
+ "<SPECIAL_588>",
592
+ "<SPECIAL_589>",
593
+ "<SPECIAL_590>",
594
+ "<SPECIAL_591>",
595
+ "<SPECIAL_592>",
596
+ "<SPECIAL_593>",
597
+ "<SPECIAL_594>",
598
+ "<SPECIAL_595>",
599
+ "<SPECIAL_596>",
600
+ "<SPECIAL_597>",
601
+ "<SPECIAL_598>",
602
+ "<SPECIAL_599>",
603
+ "<SPECIAL_600>",
604
+ "<SPECIAL_601>",
605
+ "<SPECIAL_602>",
606
+ "<SPECIAL_603>",
607
+ "<SPECIAL_604>",
608
+ "<SPECIAL_605>",
609
+ "<SPECIAL_606>",
610
+ "<SPECIAL_607>",
611
+ "<SPECIAL_608>",
612
+ "<SPECIAL_609>",
613
+ "<SPECIAL_610>",
614
+ "<SPECIAL_611>",
615
+ "<SPECIAL_612>",
616
+ "<SPECIAL_613>",
617
+ "<SPECIAL_614>",
618
+ "<SPECIAL_615>",
619
+ "<SPECIAL_616>",
620
+ "<SPECIAL_617>",
621
+ "<SPECIAL_618>",
622
+ "<SPECIAL_619>",
623
+ "<SPECIAL_620>",
624
+ "<SPECIAL_621>",
625
+ "<SPECIAL_622>",
626
+ "<SPECIAL_623>",
627
+ "<SPECIAL_624>",
628
+ "<SPECIAL_625>",
629
+ "<SPECIAL_626>",
630
+ "<SPECIAL_627>",
631
+ "<SPECIAL_628>",
632
+ "<SPECIAL_629>",
633
+ "<SPECIAL_630>",
634
+ "<SPECIAL_631>",
635
+ "<SPECIAL_632>",
636
+ "<SPECIAL_633>",
637
+ "<SPECIAL_634>",
638
+ "<SPECIAL_635>",
639
+ "<SPECIAL_636>",
640
+ "<SPECIAL_637>",
641
+ "<SPECIAL_638>",
642
+ "<SPECIAL_639>",
643
+ "<SPECIAL_640>",
644
+ "<SPECIAL_641>",
645
+ "<SPECIAL_642>",
646
+ "<SPECIAL_643>",
647
+ "<SPECIAL_644>",
648
+ "<SPECIAL_645>",
649
+ "<SPECIAL_646>",
650
+ "<SPECIAL_647>",
651
+ "<SPECIAL_648>",
652
+ "<SPECIAL_649>",
653
+ "<SPECIAL_650>",
654
+ "<SPECIAL_651>",
655
+ "<SPECIAL_652>",
656
+ "<SPECIAL_653>",
657
+ "<SPECIAL_654>",
658
+ "<SPECIAL_655>",
659
+ "<SPECIAL_656>",
660
+ "<SPECIAL_657>",
661
+ "<SPECIAL_658>",
662
+ "<SPECIAL_659>",
663
+ "<SPECIAL_660>",
664
+ "<SPECIAL_661>",
665
+ "<SPECIAL_662>",
666
+ "<SPECIAL_663>",
667
+ "<SPECIAL_664>",
668
+ "<SPECIAL_665>",
669
+ "<SPECIAL_666>",
670
+ "<SPECIAL_667>",
671
+ "<SPECIAL_668>",
672
+ "<SPECIAL_669>",
673
+ "<SPECIAL_670>",
674
+ "<SPECIAL_671>",
675
+ "<SPECIAL_672>",
676
+ "<SPECIAL_673>",
677
+ "<SPECIAL_674>",
678
+ "<SPECIAL_675>",
679
+ "<SPECIAL_676>",
680
+ "<SPECIAL_677>",
681
+ "<SPECIAL_678>",
682
+ "<SPECIAL_679>",
683
+ "<SPECIAL_680>",
684
+ "<SPECIAL_681>",
685
+ "<SPECIAL_682>",
686
+ "<SPECIAL_683>",
687
+ "<SPECIAL_684>",
688
+ "<SPECIAL_685>",
689
+ "<SPECIAL_686>",
690
+ "<SPECIAL_687>",
691
+ "<SPECIAL_688>",
692
+ "<SPECIAL_689>",
693
+ "<SPECIAL_690>",
694
+ "<SPECIAL_691>",
695
+ "<SPECIAL_692>",
696
+ "<SPECIAL_693>",
697
+ "<SPECIAL_694>",
698
+ "<SPECIAL_695>",
699
+ "<SPECIAL_696>",
700
+ "<SPECIAL_697>",
701
+ "<SPECIAL_698>",
702
+ "<SPECIAL_699>",
703
+ "<SPECIAL_700>",
704
+ "<SPECIAL_701>",
705
+ "<SPECIAL_702>",
706
+ "<SPECIAL_703>",
707
+ "<SPECIAL_704>",
708
+ "<SPECIAL_705>",
709
+ "<SPECIAL_706>",
710
+ "<SPECIAL_707>",
711
+ "<SPECIAL_708>",
712
+ "<SPECIAL_709>",
713
+ "<SPECIAL_710>",
714
+ "<SPECIAL_711>",
715
+ "<SPECIAL_712>",
716
+ "<SPECIAL_713>",
717
+ "<SPECIAL_714>",
718
+ "<SPECIAL_715>",
719
+ "<SPECIAL_716>",
720
+ "<SPECIAL_717>",
721
+ "<SPECIAL_718>",
722
+ "<SPECIAL_719>",
723
+ "<SPECIAL_720>",
724
+ "<SPECIAL_721>",
725
+ "<SPECIAL_722>",
726
+ "<SPECIAL_723>",
727
+ "<SPECIAL_724>",
728
+ "<SPECIAL_725>",
729
+ "<SPECIAL_726>",
730
+ "<SPECIAL_727>",
731
+ "<SPECIAL_728>",
732
+ "<SPECIAL_729>",
733
+ "<SPECIAL_730>",
734
+ "<SPECIAL_731>",
735
+ "<SPECIAL_732>",
736
+ "<SPECIAL_733>",
737
+ "<SPECIAL_734>",
738
+ "<SPECIAL_735>",
739
+ "<SPECIAL_736>",
740
+ "<SPECIAL_737>",
741
+ "<SPECIAL_738>",
742
+ "<SPECIAL_739>",
743
+ "<SPECIAL_740>",
744
+ "<SPECIAL_741>",
745
+ "<SPECIAL_742>",
746
+ "<SPECIAL_743>",
747
+ "<SPECIAL_744>",
748
+ "<SPECIAL_745>",
749
+ "<SPECIAL_746>",
750
+ "<SPECIAL_747>",
751
+ "<SPECIAL_748>",
752
+ "<SPECIAL_749>",
753
+ "<SPECIAL_750>",
754
+ "<SPECIAL_751>",
755
+ "<SPECIAL_752>",
756
+ "<SPECIAL_753>",
757
+ "<SPECIAL_754>",
758
+ "<SPECIAL_755>",
759
+ "<SPECIAL_756>",
760
+ "<SPECIAL_757>",
761
+ "<SPECIAL_758>",
762
+ "<SPECIAL_759>",
763
+ "<SPECIAL_760>",
764
+ "<SPECIAL_761>",
765
+ "<SPECIAL_762>",
766
+ "<SPECIAL_763>",
767
+ "<SPECIAL_764>",
768
+ "<SPECIAL_765>",
769
+ "<SPECIAL_766>",
770
+ "<SPECIAL_767>",
771
+ "<SPECIAL_768>",
772
+ "<SPECIAL_769>",
773
+ "<SPECIAL_770>",
774
+ "<SPECIAL_771>",
775
+ "<SPECIAL_772>",
776
+ "<SPECIAL_773>",
777
+ "<SPECIAL_774>",
778
+ "<SPECIAL_775>",
779
+ "<SPECIAL_776>",
780
+ "<SPECIAL_777>",
781
+ "<SPECIAL_778>",
782
+ "<SPECIAL_779>",
783
+ "<SPECIAL_780>",
784
+ "<SPECIAL_781>",
785
+ "<SPECIAL_782>",
786
+ "<SPECIAL_783>",
787
+ "<SPECIAL_784>",
788
+ "<SPECIAL_785>",
789
+ "<SPECIAL_786>",
790
+ "<SPECIAL_787>",
791
+ "<SPECIAL_788>",
792
+ "<SPECIAL_789>",
793
+ "<SPECIAL_790>",
794
+ "<SPECIAL_791>",
795
+ "<SPECIAL_792>",
796
+ "<SPECIAL_793>",
797
+ "<SPECIAL_794>",
798
+ "<SPECIAL_795>",
799
+ "<SPECIAL_796>",
800
+ "<SPECIAL_797>",
801
+ "<SPECIAL_798>",
802
+ "<SPECIAL_799>",
803
+ "<SPECIAL_800>",
804
+ "<SPECIAL_801>",
805
+ "<SPECIAL_802>",
806
+ "<SPECIAL_803>",
807
+ "<SPECIAL_804>",
808
+ "<SPECIAL_805>",
809
+ "<SPECIAL_806>",
810
+ "<SPECIAL_807>",
811
+ "<SPECIAL_808>",
812
+ "<SPECIAL_809>",
813
+ "<SPECIAL_810>",
814
+ "<SPECIAL_811>",
815
+ "<SPECIAL_812>",
816
+ "<SPECIAL_813>",
817
+ "<SPECIAL_814>",
818
+ "<SPECIAL_815>",
819
+ "<SPECIAL_816>",
820
+ "<SPECIAL_817>",
821
+ "<SPECIAL_818>",
822
+ "<SPECIAL_819>",
823
+ "<SPECIAL_820>",
824
+ "<SPECIAL_821>",
825
+ "<SPECIAL_822>",
826
+ "<SPECIAL_823>",
827
+ "<SPECIAL_824>",
828
+ "<SPECIAL_825>",
829
+ "<SPECIAL_826>",
830
+ "<SPECIAL_827>",
831
+ "<SPECIAL_828>",
832
+ "<SPECIAL_829>",
833
+ "<SPECIAL_830>",
834
+ "<SPECIAL_831>",
835
+ "<SPECIAL_832>",
836
+ "<SPECIAL_833>",
837
+ "<SPECIAL_834>",
838
+ "<SPECIAL_835>",
839
+ "<SPECIAL_836>",
840
+ "<SPECIAL_837>",
841
+ "<SPECIAL_838>",
842
+ "<SPECIAL_839>",
843
+ "<SPECIAL_840>",
844
+ "<SPECIAL_841>",
845
+ "<SPECIAL_842>",
846
+ "<SPECIAL_843>",
847
+ "<SPECIAL_844>",
848
+ "<SPECIAL_845>",
849
+ "<SPECIAL_846>",
850
+ "<SPECIAL_847>",
851
+ "<SPECIAL_848>",
852
+ "<SPECIAL_849>",
853
+ "<SPECIAL_850>",
854
+ "<SPECIAL_851>",
855
+ "<SPECIAL_852>",
856
+ "<SPECIAL_853>",
857
+ "<SPECIAL_854>",
858
+ "<SPECIAL_855>",
859
+ "<SPECIAL_856>",
860
+ "<SPECIAL_857>",
861
+ "<SPECIAL_858>",
862
+ "<SPECIAL_859>",
863
+ "<SPECIAL_860>",
864
+ "<SPECIAL_861>",
865
+ "<SPECIAL_862>",
866
+ "<SPECIAL_863>",
867
+ "<SPECIAL_864>",
868
+ "<SPECIAL_865>",
869
+ "<SPECIAL_866>",
870
+ "<SPECIAL_867>",
871
+ "<SPECIAL_868>",
872
+ "<SPECIAL_869>",
873
+ "<SPECIAL_870>",
874
+ "<SPECIAL_871>",
875
+ "<SPECIAL_872>",
876
+ "<SPECIAL_873>",
877
+ "<SPECIAL_874>",
878
+ "<SPECIAL_875>",
879
+ "<SPECIAL_876>",
880
+ "<SPECIAL_877>",
881
+ "<SPECIAL_878>",
882
+ "<SPECIAL_879>",
883
+ "<SPECIAL_880>",
884
+ "<SPECIAL_881>",
885
+ "<SPECIAL_882>",
886
+ "<SPECIAL_883>",
887
+ "<SPECIAL_884>",
888
+ "<SPECIAL_885>",
889
+ "<SPECIAL_886>",
890
+ "<SPECIAL_887>",
891
+ "<SPECIAL_888>",
892
+ "<SPECIAL_889>",
893
+ "<SPECIAL_890>",
894
+ "<SPECIAL_891>",
895
+ "<SPECIAL_892>",
896
+ "<SPECIAL_893>",
897
+ "<SPECIAL_894>",
898
+ "<SPECIAL_895>",
899
+ "<SPECIAL_896>",
900
+ "<SPECIAL_897>",
901
+ "<SPECIAL_898>",
902
+ "<SPECIAL_899>",
903
+ "<SPECIAL_900>",
904
+ "<SPECIAL_901>",
905
+ "<SPECIAL_902>",
906
+ "<SPECIAL_903>",
907
+ "<SPECIAL_904>",
908
+ "<SPECIAL_905>",
909
+ "<SPECIAL_906>",
910
+ "<SPECIAL_907>",
911
+ "<SPECIAL_908>",
912
+ "<SPECIAL_909>",
913
+ "<SPECIAL_910>",
914
+ "<SPECIAL_911>",
915
+ "<SPECIAL_912>",
916
+ "<SPECIAL_913>",
917
+ "<SPECIAL_914>",
918
+ "<SPECIAL_915>",
919
+ "<SPECIAL_916>",
920
+ "<SPECIAL_917>",
921
+ "<SPECIAL_918>",
922
+ "<SPECIAL_919>",
923
+ "<SPECIAL_920>",
924
+ "<SPECIAL_921>",
925
+ "<SPECIAL_922>",
926
+ "<SPECIAL_923>",
927
+ "<SPECIAL_924>",
928
+ "<SPECIAL_925>",
929
+ "<SPECIAL_926>",
930
+ "<SPECIAL_927>",
931
+ "<SPECIAL_928>",
932
+ "<SPECIAL_929>",
933
+ "<SPECIAL_930>",
934
+ "<SPECIAL_931>",
935
+ "<SPECIAL_932>",
936
+ "<SPECIAL_933>",
937
+ "<SPECIAL_934>",
938
+ "<SPECIAL_935>",
939
+ "<SPECIAL_936>",
940
+ "<SPECIAL_937>",
941
+ "<SPECIAL_938>",
942
+ "<SPECIAL_939>",
943
+ "<SPECIAL_940>",
944
+ "<SPECIAL_941>",
945
+ "<SPECIAL_942>",
946
+ "<SPECIAL_943>",
947
+ "<SPECIAL_944>",
948
+ "<SPECIAL_945>",
949
+ "<SPECIAL_946>",
950
+ "<SPECIAL_947>",
951
+ "<SPECIAL_948>",
952
+ "<SPECIAL_949>",
953
+ "<SPECIAL_950>",
954
+ "<SPECIAL_951>",
955
+ "<SPECIAL_952>",
956
+ "<SPECIAL_953>",
957
+ "<SPECIAL_954>",
958
+ "<SPECIAL_955>",
959
+ "<SPECIAL_956>",
960
+ "<SPECIAL_957>",
961
+ "<SPECIAL_958>",
962
+ "<SPECIAL_959>",
963
+ "<SPECIAL_960>",
964
+ "<SPECIAL_961>",
965
+ "<SPECIAL_962>",
966
+ "<SPECIAL_963>",
967
+ "<SPECIAL_964>",
968
+ "<SPECIAL_965>",
969
+ "<SPECIAL_966>",
970
+ "<SPECIAL_967>",
971
+ "<SPECIAL_968>",
972
+ "<SPECIAL_969>",
973
+ "<SPECIAL_970>",
974
+ "<SPECIAL_971>",
975
+ "<SPECIAL_972>",
976
+ "<SPECIAL_973>",
977
+ "<SPECIAL_974>",
978
+ "<SPECIAL_975>",
979
+ "<SPECIAL_976>",
980
+ "<SPECIAL_977>",
981
+ "<SPECIAL_978>",
982
+ "<SPECIAL_979>",
983
+ "<SPECIAL_980>",
984
+ "<SPECIAL_981>",
985
+ "<SPECIAL_982>",
986
+ "<SPECIAL_983>",
987
+ "<SPECIAL_984>",
988
+ "<SPECIAL_985>",
989
+ "<SPECIAL_986>",
990
+ "<SPECIAL_987>",
991
+ "<SPECIAL_988>",
992
+ "<SPECIAL_989>",
993
+ "<SPECIAL_990>",
994
+ "<SPECIAL_991>",
995
+ "<SPECIAL_992>",
996
+ "<SPECIAL_993>",
997
+ "<SPECIAL_994>",
998
+ "<SPECIAL_995>",
999
+ "<SPECIAL_996>",
1000
+ "<SPECIAL_997>",
1001
+ "<SPECIAL_998>",
1002
+ "<SPECIAL_999>"
1003
+ ],
1004
+ "bos_token": {
1005
+ "content": "<s>",
1006
+ "lstrip": false,
1007
+ "normalized": false,
1008
+ "rstrip": false,
1009
+ "single_word": false
1010
+ },
1011
+ "eos_token": {
1012
+ "content": "</s>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false
1017
+ },
1018
+ "pad_token": {
1019
+ "content": "<pad>",
1020
+ "lstrip": false,
1021
+ "normalized": false,
1022
+ "rstrip": false,
1023
+ "single_word": false
1024
+ },
1025
+ "unk_token": {
1026
+ "content": "<unk>",
1027
+ "lstrip": false,
1028
+ "normalized": false,
1029
+ "rstrip": false,
1030
+ "single_word": false
1031
+ }
1032
+ }
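
A quick sanity check (not part of the uploaded files): once this repository is downloaded locally, the special-token configuration added above can be verified with Hugging Face `transformers`. This is a minimal sketch; the local path `"./"` is a placeholder, and it assumes the long `<SPECIAL_NNN>` list above sits under the usual `additional_special_tokens` key of `special_tokens_map.json`.

```python
# Minimal sketch: confirm the tokenizer files uploaded in this commit expose the
# special tokens declared in special_tokens_map.json.
from transformers import AutoTokenizer

# "./" is a placeholder for the directory holding tokenizer.json,
# tokenizer_config.json and special_tokens_map.json from this upload.
tok = AutoTokenizer.from_pretrained("./")

# bos/eos/pad/unk exactly as declared at the end of the map above
assert tok.bos_token == "<s>"
assert tok.eos_token == "</s>"
assert tok.pad_token == "<pad>"
assert tok.unk_token == "<unk>"

# The <SPECIAL_NNN> placeholders are registered as special tokens, so the
# tokenizer should keep them atomic instead of splitting them into subwords.
ids = tok.encode("<SPECIAL_999>", add_special_tokens=False)
print(ids)  # expected: a single token id
```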
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b76085f9923309d873994d444989f7eb6ec074b06f25b58f1e8d7b7741070949
3
+ size 17078037
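
tokenizer.json itself is stored through Git LFS, so the hunk above only contains the pointer file (spec version, sha256 oid, byte size) rather than the tokenizer data. As a hedged illustration, a downloaded copy can be checked against that pointer roughly as follows; the local filename `tokenizer.json` is assumed.

```python
# Minimal sketch: verify a locally downloaded tokenizer.json against the Git LFS
# pointer committed above (oid = sha256 of the real file, size = bytes on disk).
import hashlib
import os

EXPECTED_OID = "b76085f9923309d873994d444989f7eb6ec074b06f25b58f1e8d7b7741070949"
EXPECTED_SIZE = 17078037

path = "tokenizer.json"  # placeholder: path to the downloaded file

# Size check against the pointer's "size" field.
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"

# sha256 check, streamed so the ~17 MB file never has to sit in memory at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 does not match the LFS pointer"

print("tokenizer.json matches its LFS pointer")
```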
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff