Commit e91eb74 (verified) · committed by STiFLeR7 · 1 parent: 0056b5d

Upload folder using huggingface_hub
added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   "  ": 50286,
+   "   ": 50285,
+   "    ": 50284,
+   "     ": 50283,
+   "      ": 50282,
+   "       ": 50281,
+   "        ": 50280,
+   "         ": 50279,
+   "          ": 50278,
+   "           ": 50277,
+   "            ": 50276,
+   "             ": 50275,
+   "              ": 50274,
+   "               ": 50273,
+   "                ": 50272,
+   "                 ": 50271,
+   "                  ": 50270,
+   "                   ": 50269,
+   "                    ": 50268,
+   "                     ": 50267,
+   "                      ": 50266,
+   "                       ": 50265,
+   "                        ": 50264,
+   "                         ": 50263,
+   "                          ": 50262,
+   "                           ": 50261,
+   "                            ": 50260,
+   "                             ": 50259,
+   "                              ": 50258,
+   "                               ": 50257
+ }
config.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "_name_or_path": "D:\\Phi2\\phi-2",
+   "architectures": [
+     "PhiForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 50256,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 50256,
+   "hidden_act": "gelu_new",
+   "hidden_size": 2560,
+   "initializer_range": 0.02,
+   "intermediate_size": 10240,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "phi",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "partial_rotary_factor": 0.4,
+   "qk_layernorm": false,
+   "quantization_config": {
+     "bits": 8,
+     "checkpoint_format": "gptq",
+     "desc_act": true,
+     "group_size": 128,
+     "lm_head": false,
+     "meta": {
+       "damp_auto_increment": 0.0025,
+       "damp_percent": 0.01,
+       "mse": 0.0,
+       "quantizer": [
+         "gptqmodel:2.2.0"
+       ],
+       "static_groups": false,
+       "true_sequential": true,
+       "uri": "https://github.com/modelcloud/gptqmodel"
+     },
+     "pack_dtype": "int32",
+     "quant_method": "gptq",
+     "sym": true
+   },
+   "resid_pdrop": 0.1,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.49.0",
+   "use_cache": true,
+   "vocab_size": 51200
+ }
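
The `quantization_config` block above is what transformers reads at load time: an 8-bit GPTQ checkpoint (group size 128, activation-order quantization, symmetric) produced by gptqmodel 2.2.0. A minimal inference sketch, assuming a transformers stack with GPTQ support (optimum plus gptqmodel installed); the repo id below is a placeholder, not confirmed by this commit:

```python
# Minimal sketch, not the uploader's verified workflow. Assumes transformers>=4.49
# with GPTQ support (optimum + gptqmodel installed). The model id is hypothetical.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "STiFLeR7/phi-2-gptq-8bit"  # placeholder; substitute the actual repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
# The quantization_config in config.json is detected automatically; no extra kwargs.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

prompt = "def quicksort(arr):"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
# generation_config.json (below) supplies bos/eos id 50256; max_new_tokens is illustrative.
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```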
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.49.0"
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e488b8affca490ea4a41ba5cc3a8e3e6f899ce8e72da16fcb0e79c7d6cddcf2c
+ size 3104829568
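
The entry above is a Git LFS pointer, not the weights themselves: the real model.safetensors is 3,104,829,568 bytes and is addressed by its SHA-256. A quick integrity check after download (a sketch; the filename assumes the file sits in the current directory):

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer above.
import hashlib

sha = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha.update(chunk)
# Should print e488b8affca490ea4a41ba5cc3a8e3e6f899ce8e72da16fcb0e79c7d6cddcf2c
print(sha.hexdigest())
```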
quant_log.csv ADDED
@@ -0,0 +1,193 @@
+ layer,module,loss,damp,time
+ 0,self_attn.q_proj,0.00143004,0.01000,1.666
+ 0,self_attn.k_proj,0.00152641,0.01000,1.244
+ 0,self_attn.v_proj,0.00055014,0.01000,1.224
+ 0,self_attn.dense,0.00040645,0.01000,1.199
+ 0,mlp.fc1,0.00156395,0.01000,1.471
+ 0,mlp.fc2,45.18684896,0.01250,8.215
+ 1,self_attn.q_proj,0.00098901,0.01000,1.230
+ 1,self_attn.k_proj,0.00098037,0.01000,1.193
+ 1,self_attn.v_proj,0.00025730,0.01000,1.200
+ 1,self_attn.dense,0.00010639,0.01000,1.247
+ 1,mlp.fc1,0.00321254,0.01000,1.491
+ 1,mlp.fc2,47.71025594,0.01250,8.363
+ 2,self_attn.q_proj,0.00124894,0.01000,1.345
+ 2,self_attn.k_proj,0.00125460,0.01000,1.299
+ 2,self_attn.v_proj,0.00052309,0.01000,1.253
+ 2,self_attn.dense,0.00011987,0.01000,1.390
+ 2,mlp.fc1,0.00455974,0.01000,1.709
+ 2,mlp.fc2,51.62885539,0.01250,8.104
+ 3,self_attn.q_proj,0.00130572,0.01000,1.189
+ 3,self_attn.k_proj,0.00125844,0.01000,1.239
+ 3,self_attn.v_proj,0.00100104,0.01000,1.399
+ 3,self_attn.dense,0.00010456,0.01000,1.400
+ 3,mlp.fc1,0.00656331,0.01000,1.650
+ 3,mlp.fc2,52.00990804,0.01250,8.130
+ 4,self_attn.q_proj,0.00165055,0.01000,1.275
+ 4,self_attn.k_proj,0.00151279,0.01000,1.267
+ 4,self_attn.v_proj,0.00109894,0.01000,1.210
+ 4,self_attn.dense,0.00008104,0.01000,1.164
+ 4,mlp.fc1,0.00635077,0.01000,1.470
+ 4,mlp.fc2,45.79121908,0.01250,7.909
+ 5,self_attn.q_proj,0.00146975,0.01000,1.218
+ 5,self_attn.k_proj,0.00134510,0.01000,1.212
+ 5,self_attn.v_proj,0.00096357,0.01000,1.230
+ 5,self_attn.dense,0.00009522,0.01000,1.167
+ 5,mlp.fc1,0.00543682,0.01000,1.510
+ 5,mlp.fc2,39.91021474,0.01250,8.151
+ 6,self_attn.q_proj,0.00179219,0.01000,1.304
+ 6,self_attn.k_proj,0.00164673,0.01000,1.153
+ 6,self_attn.v_proj,0.00118458,0.01000,1.130
+ 6,self_attn.dense,0.00008672,0.01000,1.179
+ 6,mlp.fc1,0.00587255,0.01000,1.484
+ 6,mlp.fc2,44.64824422,0.01250,8.148
+ 7,self_attn.q_proj,0.00186776,0.01000,1.171
+ 7,self_attn.k_proj,0.00177774,0.01000,1.137
+ 7,self_attn.v_proj,0.00120310,0.01000,1.267
+ 7,self_attn.dense,0.00006188,0.01000,1.291
+ 7,mlp.fc1,0.00593133,0.01000,1.634
+ 7,mlp.fc2,39.13363139,0.01250,8.085
+ 8,self_attn.q_proj,0.00189651,0.01000,1.240
+ 8,self_attn.k_proj,0.00176293,0.01000,1.228
+ 8,self_attn.v_proj,0.00140262,0.01000,1.245
+ 8,self_attn.dense,0.00006599,0.01000,1.192
+ 8,mlp.fc1,0.00588503,0.01000,1.698
+ 8,mlp.fc2,40.45288086,0.01250,8.500
+ 9,self_attn.q_proj,0.00191524,0.01000,1.303
+ 9,self_attn.k_proj,0.00177061,0.01000,1.146
+ 9,self_attn.v_proj,0.00140069,0.01000,1.155
+ 9,self_attn.dense,0.00008725,0.01000,1.173
+ 9,mlp.fc1,0.00577327,0.01000,1.504
+ 9,mlp.fc2,41.11215210,0.01250,8.315
+ 10,self_attn.q_proj,0.00192486,0.01000,1.232
+ 10,self_attn.k_proj,0.00184659,0.01000,1.287
+ 10,self_attn.v_proj,0.00122425,0.01000,1.217
+ 10,self_attn.dense,0.00008808,0.01000,1.212
+ 10,mlp.fc1,0.00560408,0.01000,1.617
+ 10,mlp.fc2,38.80376689,0.01250,8.274
+ 11,self_attn.q_proj,0.00201942,0.01000,1.267
+ 11,self_attn.k_proj,0.00193283,0.01000,1.240
+ 11,self_attn.v_proj,0.00134693,0.01000,1.196
+ 11,self_attn.dense,0.00010903,0.01000,1.279
+ 11,mlp.fc1,0.00545754,0.01000,1.536
+ 11,mlp.fc2,38.56307475,0.01250,7.950
+ 12,self_attn.q_proj,0.00209887,0.01000,1.200
+ 12,self_attn.k_proj,0.00200503,0.01000,1.235
+ 12,self_attn.v_proj,0.00137541,0.01000,1.226
+ 12,self_attn.dense,0.00008138,0.01000,1.221
+ 12,mlp.fc1,0.00540713,0.01000,1.525
+ 12,mlp.fc2,40.88318888,0.01250,8.217
+ 13,self_attn.q_proj,0.00200631,0.01000,1.251
+ 13,self_attn.k_proj,0.00194197,0.01000,1.219
+ 13,self_attn.v_proj,0.00134296,0.01000,1.149
+ 13,self_attn.dense,0.00007691,0.01000,1.237
+ 13,mlp.fc1,0.00549893,0.01000,1.499
+ 13,mlp.fc2,41.56899770,0.01250,8.045
+ 14,self_attn.q_proj,0.00210726,0.01000,1.282
+ 14,self_attn.k_proj,0.00314308,0.01000,1.203
+ 14,self_attn.v_proj,0.00133068,0.01000,1.242
+ 14,self_attn.dense,0.00009206,0.01000,1.209
+ 14,mlp.fc1,0.00539970,0.01000,1.479
+ 14,mlp.fc2,41.86838531,0.01250,8.351
+ 15,self_attn.q_proj,0.00199632,0.01000,1.302
+ 15,self_attn.k_proj,0.00192289,0.01000,1.252
+ 15,self_attn.v_proj,0.00126881,0.01000,1.190
+ 15,self_attn.dense,0.00010529,0.01000,1.233
+ 15,mlp.fc1,0.00512260,0.01000,1.654
+ 15,mlp.fc2,40.16023763,0.01250,8.171
+ 16,self_attn.q_proj,0.00206465,0.01000,1.269
+ 16,self_attn.k_proj,0.00198583,0.01000,1.272
+ 16,self_attn.v_proj,0.00134773,0.01000,1.218
+ 16,self_attn.dense,0.00011100,0.01000,1.248
+ 16,mlp.fc1,0.00523687,0.01000,1.613
+ 16,mlp.fc2,44.88688151,0.01250,8.167
+ 17,self_attn.q_proj,0.00197861,0.01000,1.327
+ 17,self_attn.k_proj,0.00189119,0.01000,1.293
+ 17,self_attn.v_proj,0.00124682,0.01000,1.285
+ 17,self_attn.dense,0.00008207,0.01000,1.278
+ 17,mlp.fc1,0.00489658,0.01000,1.480
+ 17,mlp.fc2,41.40846761,0.01250,8.204
+ 18,self_attn.q_proj,0.00228992,0.01000,1.315
+ 18,self_attn.k_proj,0.00217069,0.01000,1.268
+ 18,self_attn.v_proj,0.00131716,0.01000,1.255
+ 18,self_attn.dense,0.00008202,0.01000,1.252
+ 18,mlp.fc1,0.00499909,0.01000,1.578
+ 18,mlp.fc2,41.80710856,0.01250,8.319
+ 19,self_attn.q_proj,0.00216272,0.01000,1.389
+ 19,self_attn.k_proj,0.00209569,0.01000,1.459
+ 19,self_attn.v_proj,0.00134877,0.01000,1.276
+ 19,self_attn.dense,0.00010894,0.01000,1.240
+ 19,mlp.fc1,0.00535385,0.01000,1.577
+ 19,mlp.fc2,50.55662028,0.01250,8.463
+ 20,self_attn.q_proj,0.00229107,0.01000,1.262
+ 20,self_attn.k_proj,0.03851147,0.01000,1.216
+ 20,self_attn.v_proj,0.00130100,0.01000,1.126
+ 20,self_attn.dense,0.00012769,0.01000,1.188
+ 20,mlp.fc1,0.00545297,0.01000,1.534
+ 20,mlp.fc2,49.51214600,0.01250,8.264
+ 21,self_attn.q_proj,0.00218866,0.01000,1.271
+ 21,self_attn.k_proj,0.00204123,0.01000,1.301
+ 21,self_attn.v_proj,0.00156515,0.01000,1.246
+ 21,self_attn.dense,0.00012658,0.01000,1.226
+ 21,mlp.fc1,0.00584921,0.01000,1.567
+ 21,mlp.fc2,52.78467306,0.01250,8.395
+ 22,self_attn.q_proj,0.00225957,0.01000,1.327
+ 22,self_attn.k_proj,0.00357083,0.01000,1.294
+ 22,self_attn.v_proj,0.00164564,0.01000,1.182
+ 22,self_attn.dense,0.00010589,0.01000,1.298
+ 22,mlp.fc1,0.00612155,0.01000,1.662
+ 22,mlp.fc2,52.80789185,0.01250,8.462
+ 23,self_attn.q_proj,0.00243160,0.01000,1.332
+ 23,self_attn.k_proj,0.00276039,0.01000,1.332
+ 23,self_attn.v_proj,0.00159178,0.01000,1.117
+ 23,self_attn.dense,0.00016935,0.01000,1.254
+ 23,mlp.fc1,0.00641046,0.01000,1.594
+ 23,mlp.fc2,52.62599691,0.01250,8.410
+ 24,self_attn.q_proj,0.00232852,0.01000,1.257
+ 24,self_attn.k_proj,0.00215273,0.01000,1.328
+ 24,self_attn.v_proj,0.00190273,0.01000,1.293
+ 24,self_attn.dense,0.00019747,0.01000,1.250
+ 24,mlp.fc1,0.00686966,0.01000,1.517
+ 24,mlp.fc2,55.26352437,0.01250,8.156
+ 25,self_attn.q_proj,0.00253579,0.01000,1.216
+ 25,self_attn.k_proj,0.01635120,0.01000,1.195
+ 25,self_attn.v_proj,0.00198928,0.01000,1.303
+ 25,self_attn.dense,0.00024381,0.01000,1.295
+ 25,mlp.fc1,0.00749265,0.01000,1.487
+ 25,mlp.fc2,52.59010824,0.01250,8.389
+ 26,self_attn.q_proj,0.00319071,0.01000,1.357
+ 26,self_attn.k_proj,0.00231659,0.01000,1.326
+ 26,self_attn.v_proj,0.00232149,0.01000,1.287
+ 26,self_attn.dense,0.00024788,0.01000,1.330
+ 26,mlp.fc1,0.00773018,0.01000,1.644
+ 26,mlp.fc2,50.22942098,0.01250,8.381
+ 27,self_attn.q_proj,0.00275840,0.01000,1.284
+ 27,self_attn.k_proj,0.00304915,0.01000,1.255
+ 27,self_attn.v_proj,0.00238565,0.01000,1.236
+ 27,self_attn.dense,0.00028637,0.01000,1.232
+ 27,mlp.fc1,0.00808649,0.01000,1.501
+ 27,mlp.fc2,48.66911825,0.01250,8.210
+ 28,self_attn.q_proj,0.00248579,0.01000,1.194
+ 28,self_attn.k_proj,0.00226670,0.01000,1.181
+ 28,self_attn.v_proj,0.00246282,0.01000,1.183
+ 28,self_attn.dense,0.00041967,0.01000,1.281
+ 28,mlp.fc1,0.00824952,0.01000,1.527
+ 28,mlp.fc2,43.05557760,0.01250,8.224
+ 29,self_attn.q_proj,0.11153113,0.01000,1.266
+ 29,self_attn.k_proj,0.64342054,0.01000,1.226
+ 29,self_attn.v_proj,0.00479308,0.01000,1.228
+ 29,self_attn.dense,0.00025549,0.01000,1.224
+ 29,mlp.fc1,0.03276892,0.01000,1.495
+ 29,mlp.fc2,36.04873657,0.01250,8.129
+ 30,self_attn.q_proj,0.06741060,0.01000,1.441
+ 30,self_attn.k_proj,0.70108302,0.01000,1.260
+ 30,self_attn.v_proj,0.00279436,0.01000,1.327
+ 30,self_attn.dense,0.00033670,0.01000,1.297
+ 30,mlp.fc1,0.02931402,0.01000,1.595
+ 30,mlp.fc2,32.39466349,0.01250,8.556
+ 31,self_attn.q_proj,0.03431750,0.01000,1.214
+ 31,self_attn.k_proj,5.91425705,0.01000,1.192
+ 31,self_attn.v_proj,0.00139703,0.01000,1.202
+ 31,self_attn.dense,0.00031197,0.01000,1.166
+ 31,mlp.fc1,0.01573335,0.01000,1.476
+ 31,mlp.fc2,58.13183085,0.01250,8.144
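
The log shows a consistent pattern: per-module losses stay around 1e-3, except mlp.fc2, which sits between roughly 32 and 58 at every layer (its rows also show damp auto-incremented from 0.01 to 0.0125), and the q/k projections degrade noticeably in the last three layers. A short sketch for summarizing the log with pandas (assumes quant_log.csv is downloaded locally):

```python
# Sketch: summarize the per-module quantization log
# (assumes the layer,module,loss,damp,time columns shown above).
import pandas as pd

log = pd.read_csv("quant_log.csv")
# Mean/max loss per module type; mlp.fc2 dominates by about four orders of magnitude.
print(log.groupby("module")["loss"].agg(["mean", "max"]).sort_values("max"))
# Total wall time spent quantizing all 192 modules.
print(f"total quantization time: {log['time'].sum():.1f}s")
```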
quantize_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+   "bits": 8,
+   "group_size": 128,
+   "desc_act": true,
+   "sym": true,
+   "lm_head": false,
+   "quant_method": "gptq",
+   "checkpoint_format": "gptq",
+   "pack_dtype": "int32",
+   "meta": {
+     "quantizer": [
+       "gptqmodel:2.2.0"
+     ],
+     "uri": "https://github.com/modelcloud/gptqmodel",
+     "damp_percent": 0.01,
+     "damp_auto_increment": 0.0025,
+     "static_groups": false,
+     "true_sequential": true,
+     "mse": 0.0
+   }
+ }
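
quantize_config.json records the exact recipe, so a run with the same settings should be reproducible. A sketch of what the producing call likely looked like, assuming gptqmodel 2.2.0's Python API; the calibration set below is a stand-in, since the actual calibration data is not recorded in this commit:

```python
# Sketch, assuming gptqmodel 2.2.0's API; the calibration texts are placeholders --
# the real calibration set used for this checkpoint is unknown.
from gptqmodel import GPTQModel, QuantizeConfig

cfg = QuantizeConfig(
    bits=8,             # matches "bits": 8
    group_size=128,     # matches "group_size": 128
    desc_act=True,      # matches "desc_act": true
    sym=True,           # matches "sym": true
    damp_percent=0.01,  # matches "damp_percent": 0.01
)
model = GPTQModel.load("microsoft/phi-2", cfg)  # base model per _name_or_path
calibration = ["def add(a, b):\n    return a + b"] * 256  # placeholder data
model.quantize(calibration)
model.save("phi-2-gptq-8bit")
```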
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,325 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": "                               ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": "                              ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": "                             ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": "                            ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": "                           ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": "                          ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": "                         ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": "                        ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": "                       ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": "                      ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": "                     ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": "                    ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": "                   ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": "                  ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": "                 ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": "                ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": "               ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": "              ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": "             ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": "            ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50277": {
+       "content": "           ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50278": {
+       "content": "          ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50279": {
+       "content": "         ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50280": {
+       "content": "        ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50281": {
+       "content": "       ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50282": {
+       "content": "      ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50283": {
+       "content": "     ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50284": {
+       "content": "    ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50285": {
+       "content": "   ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50286": {
+       "content": "  ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50287": {
+       "content": "\t\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50288": {
+       "content": "\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50289": {
+       "content": "\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50290": {
+       "content": "\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50291": {
+       "content": "\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50292": {
+       "content": "\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50293": {
+       "content": "\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50294": {
+       "content": "\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 2048,
+   "return_token_type_ids": false,
+   "tokenizer_class": "CodeGenTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
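
The added_tokens_decoder entries 50257-50294 are the CodeGen-style whitespace tokens (runs of 2-31 spaces and 2-9 tabs), which let code indentation compress into single tokens. A quick check, using the same placeholder repo id as above:

```python
# Sketch: inspect the whitespace run tokens (the model id is a placeholder).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("STiFLeR7/phi-2-gptq-8bit")
ids = tok.encode("\t\t\tif x:\n        return x")
# A 3-tab run should map to the single added token id 50293 per added_tokens.json.
print(ids)
print(tok.convert_ids_to_tokens(ids[:1]))
```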
vocab.json ADDED
The diff for this file is too large to render. See raw diff