hbXNov committed
Commit a13e38d · verified · 1 Parent(s): 1cde398

Add files using upload-large-folder tool

config.json ADDED
@@ -0,0 +1,85 @@
+{
+  "activation_type": "silu",
+  "add_faster_video": false,
+  "add_time_instruction": false,
+  "alibi": false,
+  "alibi_bias_max": 8.0,
+  "architectures": [
+    "LlavaLladaForMaskedDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "attention_layer_norm": false,
+  "attention_layer_norm_with_affine": true,
+  "auto_map": {
+    "AutoConfig": "configuration_llada.LLaDAConfig",
+    "AutoModel": "modeling_llada.LLaDAModelLM",
+    "AutoModelForCausalLM": "modeling_llada.LLaDAModelLM"
+  },
+  "bias_for_layer_norm": false,
+  "block_group_size": 1,
+  "block_type": "llama",
+  "d_model": 4096,
+  "embedding_dropout": 0.0,
+  "embedding_size": 126464,
+  "eos_token_id": 126081,
+  "faster_token_stride": 10,
+  "flash_attention": false,
+  "force_sample": false,
+  "image_aspect_ratio": "square",
+  "image_crop_resolution": null,
+  "image_grid_pinpoints": null,
+  "image_split_resolution": null,
+  "include_bias": false,
+  "include_qkv_bias": false,
+  "init_cutoff_factor": null,
+  "init_device": "meta",
+  "init_fn": "mitchell",
+  "init_std": 0.02,
+  "input_emb_norm": false,
+  "layer_norm_type": "rms",
+  "layer_norm_with_affine": true,
+  "mask_token_id": 126336,
+  "max_sequence_length": 4096,
+  "mlp_hidden_size": 12288,
+  "mlp_ratio": 4,
+  "mm_hidden_size": 1152,
+  "mm_newline_position": "grid",
+  "mm_patch_merge_type": "spatial_unpad",
+  "mm_pooler_ratio": 2,
+  "mm_projector_lr": null,
+  "mm_projector_type": "mlp2x_gelu",
+  "mm_spatial_pool_mode": "bilinear",
+  "mm_spatial_pool_stride": null,
+  "mm_tunable_parts": "mm_vision_tower,mm_mlp_adapter,mm_language_model",
+  "mm_use_im_patch_token": false,
+  "mm_use_im_start_end": false,
+  "mm_vision_select_feature": "patch",
+  "mm_vision_select_layer": -2,
+  "mm_vision_tower": "/data/siglip-so400m-patch14-384",
+  "mm_vision_tower_lr": 2e-06,
+  "model_type": "llada",
+  "multi_query_attention": null,
+  "n_heads": 32,
+  "n_kv_heads": 32,
+  "n_layers": 32,
+  "pad_token_id": 126081,
+  "pos_skipping_range": 4096,
+  "precision": "amp_bf16",
+  "resampler_type": null,
+  "residual_dropout": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope": true,
+  "rope_full_precision": true,
+  "rope_theta": 500000.0,
+  "scale_logits": false,
+  "tokenizer_model_max_length": 2048,
+  "tokenizer_padding_side": "right",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.3",
+  "use_cache": true,
+  "use_mm_proj": true,
+  "use_pos_skipping": false,
+  "vision_tower_pretrained": null,
+  "vocab_size": 126464,
+  "weight_tying": false
+}
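A minimal loading sketch (not part of the commit): the "auto_map" entries above route AutoConfig to configuration_llada.LLaDAConfig, so the checkpoint presumably has to be loaded with trust_remote_code=True; the path below is a placeholder for a local clone of this repo.

from transformers import AutoConfig

# "path/to/checkpoint" is a placeholder, not a path from this commit.
config = AutoConfig.from_pretrained("path/to/checkpoint", trust_remote_code=True)
print(config.model_type)                                # "llada"
print(config.d_model, config.n_layers, config.n_heads)  # 4096 32 32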
configuration_llada.py ADDED
@@ -0,0 +1,463 @@
+"""
+LLaDA configuration
+"""
+from transformers import AutoConfig, PretrainedConfig
+
+from enum import Enum
+from os import PathLike
+from typing import Union
+from dataclasses import asdict, dataclass, field
+from glob import glob
+from pathlib import Path
+from typing import (
+    Any,
+    Dict,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+
+
+__all__ = [
+    "ActivationType",
+    "ActivationCheckpointingStrategy",
+    "BlockType",
+    "LayerNormType",
+    "InitFnType",
+    "ModelConfig",
+]
+
+PathOrStr = Union[str, PathLike]
+
+
+class StrEnum(str, Enum):
+    """
+    This is equivalent to Python's :class:`enum.StrEnum` since version 3.11.
+    We include this here for compatibility with older versions of Python.
+    """
+
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return f"'{str(self)}'"
+
+
+class LayerNormType(StrEnum):
+    default = "default"
+    """
+    The default LayerNorm implementation, equivalent to PyTorch's built-in version.
+    """
+
+    low_precision = "low_precision"
+    """
+    A low-precision version of the default LayerNorm.
+    """
+
+    rms = "rms"
+    """
+    An RMSNorm implementation. When using ``torch.compile`` this is
+    probably the fastest implementation.
+    """
+
+    gemma_rms = "gemma_rms"
+    """
+    An RMSNorm implementation by Gemma. When using ``torch.compile`` this is
+    probably the fastest implementation.
+    """
+
+    amd_compatible = "amd_compatible"
+    """
+    LayerNorm implemented manually to work around an issue with ROCm.
+    """
+
+
+class ActivationType(StrEnum):
+    gelu = "gelu"
+    relu = "relu"
+    silu = "silu"
+    swiglu = "swiglu"
+
+
+class BlockType(StrEnum):
+    sequential = "sequential"
+    parallel = "parallel"
+
+    llama = "llama"
+    """
+    A block similar to the sequential block with slightly different
+    implementations of operations like attention to imitate the behavior of Llama.
+    """
+
+
+class InitFnType(StrEnum):
+    mitchell = "mitchell"
+    """
+    The strategy suggested to us by Mitchell Wortsman from UW.
+    This uses a truncated normal distribution with an adaptive standard deviation that depends
+    on the size of the weights as well as the depth of the layer.
+    """
+
+    normal = "normal"
+    """
+    All weights are initialized from the same normal distribution.
+    """
+
+    kaiming_normal = "kaiming_normal"
+    """
+    All weights are initialized with the Kaiming method from a normal distribution.
+    Note this currently won't work with FSDP.
+    """
+
+    fan_in = "fan_in"
+    """
+    "Fan-in variance scaling", i.e. normal with a standard deviation of ``1/sqrt(d_in)`` where ``d_in``
+    is the input dimensionality of the kernel.
+    """
+
+    full_megatron = "full_megatron"
+    """
+    This is what metaseq calls "full megatron init". It is the init used for Llama 2.
+    """
+
+
+@dataclass
+class ModelConfig:
+    """
+    LLaDA (model) configuration.
+    """
+
+    # Note that the defaults for these attributes are equivalent to the base GPT2 model.
+
+    d_model: int = 768
+    """
+    The hidden size of the model.
+    """
+
+    n_heads: int = 12
+    """
+    The number of self-attention heads.
+    """
+
+    n_kv_heads: Optional[int] = None
+    """
+    The number of heads to use for keys and values. Defaults to `n_heads`.
+    Set this to ``None`` or ``n_heads`` for normal multi-head attention.
+    Set this to 1 for multi-query attention.
+    Set it to some in-between value for Llama2-style grouped query attention.
+    """
+
+    n_layers: int = 12
+    """
+    The number of layers/blocks.
+    """
+
+    mlp_ratio: int = 4
+    """
+    The ratio of the inner MLP dimensionality to ``d_model``.
+    This is only used when ``mlp_hidden_size`` is not set.
+    """
+
+    mlp_hidden_size: Optional[int] = None
+    """
+    Set the exact hidden size for the MLP. Otherwise the inner MLP hidden size will be set to `mlp_ratio * d_model`.
+    """
+
+    activation_type: ActivationType = ActivationType.swiglu
+    """
+    The activation function to use within the MLP layers.
+    """
+
+    block_type: BlockType = BlockType.sequential
+    """
+    The transformer block implementation.
+    """
+
+    block_group_size: int = 1
+    """
+    The number of blocks to group together into a single parent block.
+    This has no effect on the number of parameters in the model and is only used to wrap groups
+    of blocks together with a single FSDP wrapper during training.
+    """
+
+    alibi: bool = False
+    """
+    If ``True``, use ALiBi embeddings. Mutually exclusive with ``rope``.
+    """
+
+    alibi_bias_max: float = 8.0
+    """
+    Maximum absolute value of ALiBi bias.
+    """
+
+    rope: bool = False
+    """
+    Use rotary positional embeddings (RoPE). Mutually exclusive with ``alibi``.
+    """
+
+    rope_full_precision: bool = True
+    """
+    If ``True``, apply RoPE embeddings at full precision regardless of the input type. Otherwise,
+    apply RoPE at the precision of the input.
+    """
+
+    flash_attention: bool = False
+    """
+    If ``True``, use ``FlashAttention``.
+    """
+
+    attention_dropout: float = 0.1
+    """
+    The dropout probability within the attention modules.
+    """
+
+    multi_query_attention: Optional[bool] = None
+    """
+    Use the Multi-Query formulation of attention used in PaLM. This reduces the number of parameters
+    and is more efficient during inference.
+    """
+
+    attention_layer_norm: bool = False
+    """
+    Apply layer norm to the keys and queries within the attention mechanism.
+    This can help stabilize training.
+    """
+
+    residual_dropout: float = 0.1
+    """
+    The dropout probability for the MLP and attention output within each block.
+    """
+
+    embedding_dropout: float = 0.1
+    """
+    The dropout probability for embeddings.
+    """
+
+    input_emb_norm: bool = False
+    """
+    An input hidden_states norm implementation by Gemma.
+    """
+
+    layer_norm_type: LayerNormType = LayerNormType.default
+    """
+    The layernorm implementation to use.
+    """
+
+    layer_norm_with_affine: bool = True
+    """
+    Whether to include bias and weight parameters for the layer norms.
+    This only affects layer norms that are immediately followed by a linear layer in the forward pass,
+    so everything except QK-norms. To turn off affines for QK norms as well, set :attr:`attention_layer_norm_with_affine`
+    to ``False``.
+    """
+
+    rms_norm_eps: float = 1e-05
+    """
+    The RMS layernorm eps param.
+    """
+
+    attention_layer_norm_with_affine: bool = True
+    """
+    Toggle affine transform for the QK norms.
+    """
+
+    max_sequence_length: int = 1024
+    """
+    The maximum input sequence length supported by the model.
+    """
+
+    rope_theta: float = 10000.0
+    """
+    The RoPE base param.
+    """
+
+    include_qkv_bias: Optional[bool] = False
+    """
+    Whether or not to include bias parameters in QKV linear layers.
+    """
+
+    include_bias: bool = False
+    """
+    Whether or not to include bias parameters in linear layers.
+    In PaLM, they got rid of all bias terms because they found that large
+    models tend to have near 0 bias terms anyway.
+    """
+
+    bias_for_layer_norm: Optional[bool] = None
+    """
+    Whether or not to include bias parameters in layer norm.
+    This is separate from the include_bias parameter, because of a ROCm crash when biases are disabled in
+    layer norm.
+    When this is None (the default), it inherits the setting from include_bias.
+    """
+
+    scale_logits: bool = False
+    """
+    If ``True``, scale the output logits by ``1 / sqrt(d_model)``.
+    """
+
+    vocab_size: int = 50257
+    """
+    Vocabulary size of the model.
+    """
+
+    embedding_size: Optional[int] = 50304
+    """
+    The number of embeddings, i.e. the number of tokens. If set to ``None`` it will default
+    to ``vocab_size``. If ``vocab_size`` is not a multiple of 128, setting this to the
+    next multiple of 128 that's greater than ``vocab_size`` can improve throughput
+    substantially.
+    """
+
+    weight_tying: bool = True
+    """
+    Whether to tie output linear weights to the input embedding.
+    """
+
+    eos_token_id: int = 50256
+    """
+    The ID of the end-of-sentence special token.
+    """
+
+    pad_token_id: int = 50256
+    """
+    The ID of the token to use for padding. Defaults to the ID of the EOS token.
+    """
+
+    mask_token_id: Optional[int] = 50256
+    """
+    The ID of the token to use for the mask token. Defaults to the ID of the EOS token.
+    """
+
+    init_device: Optional[str] = None
+    """
+    The torch device to use when initializing the model parameters, e.g. "cpu", "cuda:0", "meta".
+    """
+
+    init_fn: InitFnType = InitFnType.normal
+    """
+    The weight initialization strategy.
+    """
+
+    init_std: float = 0.02
+    """
+    The standard deviation to use when initializing weights with a "fixed distribution" ``init_fn``, such
+    as "normal".
+    """
+
+    init_cutoff_factor: Optional[float] = None
+    """
+    A positive factor used to scale the cutoff values when initializing weights with a "fixed distribution" ``init_fn``, such
+    as "normal". Setting this to None means values are not cut off.
+    """
+
+    precision: Optional[str] = None
+    """
+    Precision used to train/evaluate with. You shouldn't set this directly.
+    See :data:`TrainConfig.precision` instead.
+    """
+
+    @property
+    def effective_n_kv_heads(self) -> int:
+        if self.n_kv_heads is None:
+            if self.multi_query_attention is True:
+                return 1
+            else:
+                return self.n_heads
+        else:
+            if self.multi_query_attention is None:
+                return self.n_kv_heads
+            if self.multi_query_attention:
+                n_kv_heads_should_be = 1
+            else:
+                n_kv_heads_should_be = self.n_heads
+            if self.n_kv_heads == n_kv_heads_should_be:
+                return n_kv_heads_should_be
+            else:
+                raise Exception(
+                    "You can't set `multi_query_attention` and `n_kv_heads` at the same time."
+                )
+
+class ActivationCheckpointingStrategy(StrEnum):
+    whole_layer = "whole_layer"
+    """
+    Checkpoint every transformer layer.
+    """
+
+    one_in_two = "one_in_two"
+    """
+    Checkpoint one in two transformer layers.
+    """
+
+    one_in_three = "one_in_three"
+    """
+    Checkpoint one in three transformer layers.
+    """
+
+    one_in_four = "one_in_four"
+    """
+    Checkpoint one in four transformer layers.
+    """
+
+    two_in_three = "two_in_three"
+    """
+    Checkpoint two out of every three transformer layers.
+    """
+
+    three_in_four = "three_in_four"
+    """
+    Checkpoint three out of every four transformer layers.
+    """
+
+    four_in_five = "four_in_five"
+    """
+    Checkpoint four out of every five transformer layers.
+    """
+
+    nine_in_ten = "nine_in_ten"
+    """
+    Checkpoint nine out of every ten transformer layers.
+    """
+
+    fine_grained = "fine_grained"
+    """
+    Focus checkpointing on where it is cheap to recompute and saves the most memory.
+    """
+
+
+class LLaDAConfig(PretrainedConfig):
+    model_type = "llada"
+    keys_to_ignore_at_inference = ["past_key_values"]  # TODO: confirm
+
+    def __init__(self, use_cache: bool = False, **kwargs):
+        model_config = ModelConfig()
+        all_kwargs = model_config.__dict__
+        all_kwargs.update(kwargs)
+        all_kwargs.update({"use_cache": use_cache})
+        all_kwargs.update(
+            {
+                "architectures": all_kwargs.get("architectures", ["LLaDAModelLM"])
+            }
+        )
+        super().__init__(**all_kwargs)
+
+    @property
+    def num_attention_heads(self):
+        return self.n_heads
+
+    @property
+    def num_hidden_layers(self):
+        return self.n_layers
+
+    @property
+    def hidden_size(self):
+        return self.d_model
+
+
+# Register the config class so that it is available for transformer pipelines, auto-loading etc.
+AutoConfig.register("llada", LLaDAConfig)
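A short usage sketch (an assumption, not part of the commit) of the classes defined above, exercising the effective_n_kv_heads logic that maps n_kv_heads/multi_query_attention onto multi-head, multi-query, and grouped-query attention:

from configuration_llada import LLaDAConfig, ModelConfig

# n_kv_heads=None with multi_query_attention unset -> plain multi-head attention.
assert ModelConfig(n_heads=32).effective_n_kv_heads == 32
# multi_query_attention=True collapses keys/values to a single head.
assert ModelConfig(n_heads=32, multi_query_attention=True).effective_n_kv_heads == 1
# An in-between n_kv_heads gives Llama2-style grouped-query attention.
assert ModelConfig(n_heads=32, n_kv_heads=8).effective_n_kv_heads == 8

# LLaDAConfig copies ModelConfig's defaults, applies kwargs, and exposes the
# HF-standard aliases on top (values here match config.json above).
cfg = LLaDAConfig(d_model=4096, n_heads=32, n_layers=32)
assert (cfg.hidden_size, cfg.num_attention_heads, cfg.num_hidden_layers) == (4096, 32, 32)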
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 126080,
+  "eos_token_id": 126081,
+  "transformers_version": "4.50.3"
+}
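Presumably (an assumption, not stated in the commit) these decoding defaults are the ones transformers' GenerationConfig picks up from the checkpoint directory:

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("path/to/checkpoint")  # placeholder path
print(gen.bos_token_id, gen.eos_token_id)  # 126080 126081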
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ff076387fed310be2055905e7c3141f4218bfaf3b4c5c2da1594b539fa7a6d8
+size 4995589944
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03b3979a8abbdc8bce258ea67cf9d97cee2e21e07a00790b3bc2008a178c09d9
+size 4999819552
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb706f8125d1c0a6871f49fca65651001f5214928810768e7580fb4bae13c561
+size 4999802728
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6b1306483e24248a2ebb355f7a5647987505d937b3784786c3a935e4ea91243
+size 1874563264
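The four entries above are Git LFS pointer files (spec v1), not the weight shards themselves; each records the sha256 and byte size of the real file. A small sketch (not part of the commit) for checking a downloaded shard against its pointer:

import hashlib

def verify_shard(path: str, expected_oid: str, expected_size: int, chunk: int = 1 << 20) -> bool:
    """Stream the file and compare its sha256/size to the LFS pointer fields."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        while block := f.read(chunk):
            digest.update(block)
            size += len(block)
    return size == expected_size and digest.hexdigest() == expected_oid

# e.g. for the first shard listed above:
# verify_shard("model-00001-of-00004.safetensors",
#              "1ff076387fed310be2055905e7c3141f4218bfaf3b4c5c2da1594b539fa7a6d8",
#              4995589944)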
model.safetensors.index.json ADDED
@@ -0,0 +1,724 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_size": 16869674048
4
+ },
5
+ "weight_map": {
6
+ "model.image_newline": "model-00001-of-00004.safetensors",
7
+ "model.mm_projector.0.bias": "model-00004-of-00004.safetensors",
8
+ "model.mm_projector.0.weight": "model-00004-of-00004.safetensors",
9
+ "model.mm_projector.2.bias": "model-00004-of-00004.safetensors",
10
+ "model.mm_projector.2.weight": "model-00004-of-00004.safetensors",
11
+ "model.transformer.blocks.0.attn_norm.weight": "model-00001-of-00004.safetensors",
12
+ "model.transformer.blocks.0.attn_out.weight": "model-00001-of-00004.safetensors",
13
+ "model.transformer.blocks.0.ff_norm.weight": "model-00001-of-00004.safetensors",
14
+ "model.transformer.blocks.0.ff_out.weight": "model-00001-of-00004.safetensors",
15
+ "model.transformer.blocks.0.ff_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.transformer.blocks.0.k_proj.weight": "model-00001-of-00004.safetensors",
17
+ "model.transformer.blocks.0.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.transformer.blocks.0.up_proj.weight": "model-00001-of-00004.safetensors",
19
+ "model.transformer.blocks.0.v_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.transformer.blocks.1.attn_norm.weight": "model-00001-of-00004.safetensors",
21
+ "model.transformer.blocks.1.attn_out.weight": "model-00001-of-00004.safetensors",
22
+ "model.transformer.blocks.1.ff_norm.weight": "model-00001-of-00004.safetensors",
23
+ "model.transformer.blocks.1.ff_out.weight": "model-00001-of-00004.safetensors",
24
+ "model.transformer.blocks.1.ff_proj.weight": "model-00001-of-00004.safetensors",
25
+ "model.transformer.blocks.1.k_proj.weight": "model-00001-of-00004.safetensors",
26
+ "model.transformer.blocks.1.q_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.transformer.blocks.1.up_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.transformer.blocks.1.v_proj.weight": "model-00001-of-00004.safetensors",
29
+ "model.transformer.blocks.10.attn_norm.weight": "model-00002-of-00004.safetensors",
30
+ "model.transformer.blocks.10.attn_out.weight": "model-00002-of-00004.safetensors",
31
+ "model.transformer.blocks.10.ff_norm.weight": "model-00002-of-00004.safetensors",
32
+ "model.transformer.blocks.10.ff_out.weight": "model-00002-of-00004.safetensors",
33
+ "model.transformer.blocks.10.ff_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.transformer.blocks.10.k_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.transformer.blocks.10.q_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.transformer.blocks.10.up_proj.weight": "model-00002-of-00004.safetensors",
37
+ "model.transformer.blocks.10.v_proj.weight": "model-00002-of-00004.safetensors",
38
+ "model.transformer.blocks.11.attn_norm.weight": "model-00002-of-00004.safetensors",
39
+ "model.transformer.blocks.11.attn_out.weight": "model-00002-of-00004.safetensors",
40
+ "model.transformer.blocks.11.ff_norm.weight": "model-00002-of-00004.safetensors",
41
+ "model.transformer.blocks.11.ff_out.weight": "model-00002-of-00004.safetensors",
42
+ "model.transformer.blocks.11.ff_proj.weight": "model-00002-of-00004.safetensors",
43
+ "model.transformer.blocks.11.k_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.transformer.blocks.11.q_proj.weight": "model-00002-of-00004.safetensors",
45
+ "model.transformer.blocks.11.up_proj.weight": "model-00002-of-00004.safetensors",
46
+ "model.transformer.blocks.11.v_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.transformer.blocks.12.attn_norm.weight": "model-00002-of-00004.safetensors",
48
+ "model.transformer.blocks.12.attn_out.weight": "model-00002-of-00004.safetensors",
49
+ "model.transformer.blocks.12.ff_norm.weight": "model-00002-of-00004.safetensors",
50
+ "model.transformer.blocks.12.ff_out.weight": "model-00002-of-00004.safetensors",
51
+ "model.transformer.blocks.12.ff_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.transformer.blocks.12.k_proj.weight": "model-00002-of-00004.safetensors",
53
+ "model.transformer.blocks.12.q_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.transformer.blocks.12.up_proj.weight": "model-00002-of-00004.safetensors",
55
+ "model.transformer.blocks.12.v_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.transformer.blocks.13.attn_norm.weight": "model-00002-of-00004.safetensors",
57
+ "model.transformer.blocks.13.attn_out.weight": "model-00002-of-00004.safetensors",
58
+ "model.transformer.blocks.13.ff_norm.weight": "model-00002-of-00004.safetensors",
59
+ "model.transformer.blocks.13.ff_out.weight": "model-00002-of-00004.safetensors",
60
+ "model.transformer.blocks.13.ff_proj.weight": "model-00002-of-00004.safetensors",
61
+ "model.transformer.blocks.13.k_proj.weight": "model-00002-of-00004.safetensors",
62
+ "model.transformer.blocks.13.q_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.transformer.blocks.13.up_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.transformer.blocks.13.v_proj.weight": "model-00002-of-00004.safetensors",
65
+ "model.transformer.blocks.14.attn_norm.weight": "model-00002-of-00004.safetensors",
66
+ "model.transformer.blocks.14.attn_out.weight": "model-00002-of-00004.safetensors",
67
+ "model.transformer.blocks.14.ff_norm.weight": "model-00002-of-00004.safetensors",
68
+ "model.transformer.blocks.14.ff_out.weight": "model-00002-of-00004.safetensors",
69
+ "model.transformer.blocks.14.ff_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.transformer.blocks.14.k_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.transformer.blocks.14.q_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.transformer.blocks.14.up_proj.weight": "model-00002-of-00004.safetensors",
73
+ "model.transformer.blocks.14.v_proj.weight": "model-00002-of-00004.safetensors",
74
+ "model.transformer.blocks.15.attn_norm.weight": "model-00002-of-00004.safetensors",
75
+ "model.transformer.blocks.15.attn_out.weight": "model-00002-of-00004.safetensors",
76
+ "model.transformer.blocks.15.ff_norm.weight": "model-00002-of-00004.safetensors",
77
+ "model.transformer.blocks.15.ff_out.weight": "model-00002-of-00004.safetensors",
78
+ "model.transformer.blocks.15.ff_proj.weight": "model-00002-of-00004.safetensors",
79
+ "model.transformer.blocks.15.k_proj.weight": "model-00002-of-00004.safetensors",
80
+ "model.transformer.blocks.15.q_proj.weight": "model-00002-of-00004.safetensors",
81
+ "model.transformer.blocks.15.up_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.transformer.blocks.15.v_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.transformer.blocks.16.attn_norm.weight": "model-00002-of-00004.safetensors",
84
+ "model.transformer.blocks.16.attn_out.weight": "model-00002-of-00004.safetensors",
85
+ "model.transformer.blocks.16.ff_norm.weight": "model-00002-of-00004.safetensors",
86
+ "model.transformer.blocks.16.ff_out.weight": "model-00002-of-00004.safetensors",
87
+ "model.transformer.blocks.16.ff_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.transformer.blocks.16.k_proj.weight": "model-00002-of-00004.safetensors",
89
+ "model.transformer.blocks.16.q_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.transformer.blocks.16.up_proj.weight": "model-00002-of-00004.safetensors",
91
+ "model.transformer.blocks.16.v_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.transformer.blocks.17.attn_norm.weight": "model-00002-of-00004.safetensors",
93
+ "model.transformer.blocks.17.attn_out.weight": "model-00002-of-00004.safetensors",
94
+ "model.transformer.blocks.17.ff_norm.weight": "model-00002-of-00004.safetensors",
95
+ "model.transformer.blocks.17.ff_out.weight": "model-00002-of-00004.safetensors",
96
+ "model.transformer.blocks.17.ff_proj.weight": "model-00002-of-00004.safetensors",
97
+ "model.transformer.blocks.17.k_proj.weight": "model-00002-of-00004.safetensors",
98
+ "model.transformer.blocks.17.q_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.transformer.blocks.17.up_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.transformer.blocks.17.v_proj.weight": "model-00002-of-00004.safetensors",
101
+ "model.transformer.blocks.18.attn_norm.weight": "model-00002-of-00004.safetensors",
102
+ "model.transformer.blocks.18.attn_out.weight": "model-00002-of-00004.safetensors",
103
+ "model.transformer.blocks.18.ff_norm.weight": "model-00002-of-00004.safetensors",
104
+ "model.transformer.blocks.18.ff_out.weight": "model-00002-of-00004.safetensors",
105
+ "model.transformer.blocks.18.ff_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.transformer.blocks.18.k_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.transformer.blocks.18.q_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.transformer.blocks.18.up_proj.weight": "model-00002-of-00004.safetensors",
109
+ "model.transformer.blocks.18.v_proj.weight": "model-00002-of-00004.safetensors",
110
+ "model.transformer.blocks.19.attn_norm.weight": "model-00002-of-00004.safetensors",
111
+ "model.transformer.blocks.19.attn_out.weight": "model-00002-of-00004.safetensors",
112
+ "model.transformer.blocks.19.ff_norm.weight": "model-00002-of-00004.safetensors",
113
+ "model.transformer.blocks.19.ff_out.weight": "model-00002-of-00004.safetensors",
114
+ "model.transformer.blocks.19.ff_proj.weight": "model-00002-of-00004.safetensors",
115
+ "model.transformer.blocks.19.k_proj.weight": "model-00002-of-00004.safetensors",
116
+ "model.transformer.blocks.19.q_proj.weight": "model-00002-of-00004.safetensors",
117
+ "model.transformer.blocks.19.up_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.transformer.blocks.19.v_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.transformer.blocks.2.attn_norm.weight": "model-00001-of-00004.safetensors",
120
+ "model.transformer.blocks.2.attn_out.weight": "model-00001-of-00004.safetensors",
121
+ "model.transformer.blocks.2.ff_norm.weight": "model-00001-of-00004.safetensors",
122
+ "model.transformer.blocks.2.ff_out.weight": "model-00001-of-00004.safetensors",
123
+ "model.transformer.blocks.2.ff_proj.weight": "model-00001-of-00004.safetensors",
124
+ "model.transformer.blocks.2.k_proj.weight": "model-00001-of-00004.safetensors",
125
+ "model.transformer.blocks.2.q_proj.weight": "model-00001-of-00004.safetensors",
126
+ "model.transformer.blocks.2.up_proj.weight": "model-00001-of-00004.safetensors",
127
+ "model.transformer.blocks.2.v_proj.weight": "model-00001-of-00004.safetensors",
128
+ "model.transformer.blocks.20.attn_norm.weight": "model-00002-of-00004.safetensors",
129
+ "model.transformer.blocks.20.attn_out.weight": "model-00002-of-00004.safetensors",
130
+ "model.transformer.blocks.20.ff_norm.weight": "model-00002-of-00004.safetensors",
131
+ "model.transformer.blocks.20.ff_out.weight": "model-00002-of-00004.safetensors",
132
+ "model.transformer.blocks.20.ff_proj.weight": "model-00003-of-00004.safetensors",
133
+ "model.transformer.blocks.20.k_proj.weight": "model-00002-of-00004.safetensors",
134
+ "model.transformer.blocks.20.q_proj.weight": "model-00002-of-00004.safetensors",
135
+ "model.transformer.blocks.20.up_proj.weight": "model-00003-of-00004.safetensors",
136
+ "model.transformer.blocks.20.v_proj.weight": "model-00002-of-00004.safetensors",
137
+ "model.transformer.blocks.21.attn_norm.weight": "model-00003-of-00004.safetensors",
138
+ "model.transformer.blocks.21.attn_out.weight": "model-00003-of-00004.safetensors",
139
+ "model.transformer.blocks.21.ff_norm.weight": "model-00003-of-00004.safetensors",
140
+ "model.transformer.blocks.21.ff_out.weight": "model-00003-of-00004.safetensors",
141
+ "model.transformer.blocks.21.ff_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.transformer.blocks.21.k_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.transformer.blocks.21.q_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.transformer.blocks.21.up_proj.weight": "model-00003-of-00004.safetensors",
145
+ "model.transformer.blocks.21.v_proj.weight": "model-00003-of-00004.safetensors",
146
+ "model.transformer.blocks.22.attn_norm.weight": "model-00003-of-00004.safetensors",
147
+ "model.transformer.blocks.22.attn_out.weight": "model-00003-of-00004.safetensors",
148
+ "model.transformer.blocks.22.ff_norm.weight": "model-00003-of-00004.safetensors",
149
+ "model.transformer.blocks.22.ff_out.weight": "model-00003-of-00004.safetensors",
150
+ "model.transformer.blocks.22.ff_proj.weight": "model-00003-of-00004.safetensors",
151
+ "model.transformer.blocks.22.k_proj.weight": "model-00003-of-00004.safetensors",
152
+ "model.transformer.blocks.22.q_proj.weight": "model-00003-of-00004.safetensors",
153
+ "model.transformer.blocks.22.up_proj.weight": "model-00003-of-00004.safetensors",
154
+ "model.transformer.blocks.22.v_proj.weight": "model-00003-of-00004.safetensors",
155
+ "model.transformer.blocks.23.attn_norm.weight": "model-00003-of-00004.safetensors",
156
+ "model.transformer.blocks.23.attn_out.weight": "model-00003-of-00004.safetensors",
157
+ "model.transformer.blocks.23.ff_norm.weight": "model-00003-of-00004.safetensors",
158
+ "model.transformer.blocks.23.ff_out.weight": "model-00003-of-00004.safetensors",
159
+ "model.transformer.blocks.23.ff_proj.weight": "model-00003-of-00004.safetensors",
160
+ "model.transformer.blocks.23.k_proj.weight": "model-00003-of-00004.safetensors",
161
+ "model.transformer.blocks.23.q_proj.weight": "model-00003-of-00004.safetensors",
162
+ "model.transformer.blocks.23.up_proj.weight": "model-00003-of-00004.safetensors",
163
+ "model.transformer.blocks.23.v_proj.weight": "model-00003-of-00004.safetensors",
164
+ "model.transformer.blocks.24.attn_norm.weight": "model-00003-of-00004.safetensors",
165
+ "model.transformer.blocks.24.attn_out.weight": "model-00003-of-00004.safetensors",
166
+ "model.transformer.blocks.24.ff_norm.weight": "model-00003-of-00004.safetensors",
167
+ "model.transformer.blocks.24.ff_out.weight": "model-00003-of-00004.safetensors",
168
+ "model.transformer.blocks.24.ff_proj.weight": "model-00003-of-00004.safetensors",
169
+ "model.transformer.blocks.24.k_proj.weight": "model-00003-of-00004.safetensors",
170
+ "model.transformer.blocks.24.q_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.transformer.blocks.24.up_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.transformer.blocks.24.v_proj.weight": "model-00003-of-00004.safetensors",
173
+ "model.transformer.blocks.25.attn_norm.weight": "model-00003-of-00004.safetensors",
174
+ "model.transformer.blocks.25.attn_out.weight": "model-00003-of-00004.safetensors",
175
+ "model.transformer.blocks.25.ff_norm.weight": "model-00003-of-00004.safetensors",
176
+ "model.transformer.blocks.25.ff_out.weight": "model-00003-of-00004.safetensors",
177
+ "model.transformer.blocks.25.ff_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.transformer.blocks.25.k_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.transformer.blocks.25.q_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.transformer.blocks.25.up_proj.weight": "model-00003-of-00004.safetensors",
181
+ "model.transformer.blocks.25.v_proj.weight": "model-00003-of-00004.safetensors",
182
+ "model.transformer.blocks.26.attn_norm.weight": "model-00003-of-00004.safetensors",
183
+ "model.transformer.blocks.26.attn_out.weight": "model-00003-of-00004.safetensors",
184
+ "model.transformer.blocks.26.ff_norm.weight": "model-00003-of-00004.safetensors",
185
+ "model.transformer.blocks.26.ff_out.weight": "model-00003-of-00004.safetensors",
186
+ "model.transformer.blocks.26.ff_proj.weight": "model-00003-of-00004.safetensors",
187
+ "model.transformer.blocks.26.k_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.transformer.blocks.26.q_proj.weight": "model-00003-of-00004.safetensors",
189
+ "model.transformer.blocks.26.up_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.transformer.blocks.26.v_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.transformer.blocks.27.attn_norm.weight": "model-00003-of-00004.safetensors",
192
+ "model.transformer.blocks.27.attn_out.weight": "model-00003-of-00004.safetensors",
193
+ "model.transformer.blocks.27.ff_norm.weight": "model-00003-of-00004.safetensors",
194
+ "model.transformer.blocks.27.ff_out.weight": "model-00003-of-00004.safetensors",
195
+ "model.transformer.blocks.27.ff_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.transformer.blocks.27.k_proj.weight": "model-00003-of-00004.safetensors",
197
+ "model.transformer.blocks.27.q_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.transformer.blocks.27.up_proj.weight": "model-00003-of-00004.safetensors",
199
+ "model.transformer.blocks.27.v_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.transformer.blocks.28.attn_norm.weight": "model-00003-of-00004.safetensors",
201
+ "model.transformer.blocks.28.attn_out.weight": "model-00003-of-00004.safetensors",
202
+ "model.transformer.blocks.28.ff_norm.weight": "model-00003-of-00004.safetensors",
203
+ "model.transformer.blocks.28.ff_out.weight": "model-00003-of-00004.safetensors",
204
+ "model.transformer.blocks.28.ff_proj.weight": "model-00003-of-00004.safetensors",
205
+ "model.transformer.blocks.28.k_proj.weight": "model-00003-of-00004.safetensors",
206
+ "model.transformer.blocks.28.q_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.transformer.blocks.28.up_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.transformer.blocks.28.v_proj.weight": "model-00003-of-00004.safetensors",
209
+ "model.transformer.blocks.29.attn_norm.weight": "model-00003-of-00004.safetensors",
210
+ "model.transformer.blocks.29.attn_out.weight": "model-00003-of-00004.safetensors",
211
+ "model.transformer.blocks.29.ff_norm.weight": "model-00003-of-00004.safetensors",
212
+ "model.transformer.blocks.29.ff_out.weight": "model-00003-of-00004.safetensors",
213
+ "model.transformer.blocks.29.ff_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.transformer.blocks.29.k_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.transformer.blocks.29.q_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.transformer.blocks.29.up_proj.weight": "model-00003-of-00004.safetensors",
217
+ "model.transformer.blocks.29.v_proj.weight": "model-00003-of-00004.safetensors",
218
+ "model.transformer.blocks.3.attn_norm.weight": "model-00001-of-00004.safetensors",
219
+ "model.transformer.blocks.3.attn_out.weight": "model-00001-of-00004.safetensors",
220
+ "model.transformer.blocks.3.ff_norm.weight": "model-00001-of-00004.safetensors",
221
+ "model.transformer.blocks.3.ff_out.weight": "model-00001-of-00004.safetensors",
222
+ "model.transformer.blocks.3.ff_proj.weight": "model-00001-of-00004.safetensors",
223
+ "model.transformer.blocks.3.k_proj.weight": "model-00001-of-00004.safetensors",
224
+ "model.transformer.blocks.3.q_proj.weight": "model-00001-of-00004.safetensors",
225
+ "model.transformer.blocks.3.up_proj.weight": "model-00001-of-00004.safetensors",
226
+ "model.transformer.blocks.3.v_proj.weight": "model-00001-of-00004.safetensors",
227
+ "model.transformer.blocks.30.attn_norm.weight": "model-00003-of-00004.safetensors",
228
+ "model.transformer.blocks.30.attn_out.weight": "model-00003-of-00004.safetensors",
229
+ "model.transformer.blocks.30.ff_norm.weight": "model-00003-of-00004.safetensors",
230
+ "model.transformer.blocks.30.ff_out.weight": "model-00003-of-00004.safetensors",
231
+ "model.transformer.blocks.30.ff_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.transformer.blocks.30.k_proj.weight": "model-00003-of-00004.safetensors",
233
+ "model.transformer.blocks.30.q_proj.weight": "model-00003-of-00004.safetensors",
234
+ "model.transformer.blocks.30.up_proj.weight": "model-00003-of-00004.safetensors",
235
+ "model.transformer.blocks.30.v_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.transformer.blocks.31.attn_norm.weight": "model-00003-of-00004.safetensors",
237
+ "model.transformer.blocks.31.attn_out.weight": "model-00003-of-00004.safetensors",
238
+ "model.transformer.blocks.31.ff_norm.weight": "model-00003-of-00004.safetensors",
239
+ "model.transformer.blocks.31.ff_out.weight": "model-00003-of-00004.safetensors",
240
+ "model.transformer.blocks.31.ff_proj.weight": "model-00003-of-00004.safetensors",
241
+ "model.transformer.blocks.31.k_proj.weight": "model-00003-of-00004.safetensors",
242
+ "model.transformer.blocks.31.q_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.transformer.blocks.31.up_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.transformer.blocks.31.v_proj.weight": "model-00003-of-00004.safetensors",
245
+ "model.transformer.blocks.4.attn_norm.weight": "model-00001-of-00004.safetensors",
246
+ "model.transformer.blocks.4.attn_out.weight": "model-00001-of-00004.safetensors",
247
+ "model.transformer.blocks.4.ff_norm.weight": "model-00001-of-00004.safetensors",
248
+ "model.transformer.blocks.4.ff_out.weight": "model-00001-of-00004.safetensors",
249
+ "model.transformer.blocks.4.ff_proj.weight": "model-00001-of-00004.safetensors",
250
+ "model.transformer.blocks.4.k_proj.weight": "model-00001-of-00004.safetensors",
251
+ "model.transformer.blocks.4.q_proj.weight": "model-00001-of-00004.safetensors",
252
+ "model.transformer.blocks.4.up_proj.weight": "model-00001-of-00004.safetensors",
253
+ "model.transformer.blocks.4.v_proj.weight": "model-00001-of-00004.safetensors",
254
+ "model.transformer.blocks.5.attn_norm.weight": "model-00001-of-00004.safetensors",
255
+ "model.transformer.blocks.5.attn_out.weight": "model-00001-of-00004.safetensors",
256
+ "model.transformer.blocks.5.ff_norm.weight": "model-00001-of-00004.safetensors",
257
+ "model.transformer.blocks.5.ff_out.weight": "model-00001-of-00004.safetensors",
258
+ "model.transformer.blocks.5.ff_proj.weight": "model-00001-of-00004.safetensors",
259
+ "model.transformer.blocks.5.k_proj.weight": "model-00001-of-00004.safetensors",
260
+ "model.transformer.blocks.5.q_proj.weight": "model-00001-of-00004.safetensors",
261
+ "model.transformer.blocks.5.up_proj.weight": "model-00001-of-00004.safetensors",
262
+ "model.transformer.blocks.5.v_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.transformer.blocks.6.attn_norm.weight": "model-00001-of-00004.safetensors",
264
+ "model.transformer.blocks.6.attn_out.weight": "model-00001-of-00004.safetensors",
265
+ "model.transformer.blocks.6.ff_norm.weight": "model-00001-of-00004.safetensors",
266
+ "model.transformer.blocks.6.ff_out.weight": "model-00001-of-00004.safetensors",
267
+ "model.transformer.blocks.6.ff_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.transformer.blocks.6.k_proj.weight": "model-00001-of-00004.safetensors",
269
+ "model.transformer.blocks.6.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.transformer.blocks.6.up_proj.weight": "model-00001-of-00004.safetensors",
271
+ "model.transformer.blocks.6.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.transformer.blocks.7.attn_norm.weight": "model-00001-of-00004.safetensors",
273
+ "model.transformer.blocks.7.attn_out.weight": "model-00001-of-00004.safetensors",
274
+ "model.transformer.blocks.7.ff_norm.weight": "model-00001-of-00004.safetensors",
275
+ "model.transformer.blocks.7.ff_out.weight": "model-00001-of-00004.safetensors",
276
+ "model.transformer.blocks.7.ff_proj.weight": "model-00001-of-00004.safetensors",
277
+ "model.transformer.blocks.7.k_proj.weight": "model-00001-of-00004.safetensors",
278
+ "model.transformer.blocks.7.q_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.transformer.blocks.7.up_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.transformer.blocks.7.v_proj.weight": "model-00001-of-00004.safetensors",
281
+ "model.transformer.blocks.8.attn_norm.weight": "model-00001-of-00004.safetensors",
282
+ "model.transformer.blocks.8.attn_out.weight": "model-00001-of-00004.safetensors",
283
+ "model.transformer.blocks.8.ff_norm.weight": "model-00001-of-00004.safetensors",
284
+ "model.transformer.blocks.8.ff_out.weight": "model-00001-of-00004.safetensors",
285
+ "model.transformer.blocks.8.ff_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.transformer.blocks.8.k_proj.weight": "model-00001-of-00004.safetensors",
287
+ "model.transformer.blocks.8.q_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.transformer.blocks.8.up_proj.weight": "model-00001-of-00004.safetensors",
289
+ "model.transformer.blocks.8.v_proj.weight": "model-00001-of-00004.safetensors",
290
+ "model.transformer.blocks.9.attn_norm.weight": "model-00002-of-00004.safetensors",
291
+ "model.transformer.blocks.9.attn_out.weight": "model-00001-of-00004.safetensors",
292
+ "model.transformer.blocks.9.ff_norm.weight": "model-00002-of-00004.safetensors",
293
+ "model.transformer.blocks.9.ff_out.weight": "model-00002-of-00004.safetensors",
294
+ "model.transformer.blocks.9.ff_proj.weight": "model-00002-of-00004.safetensors",
295
+ "model.transformer.blocks.9.k_proj.weight": "model-00002-of-00004.safetensors",
296
+ "model.transformer.blocks.9.q_proj.weight": "model-00002-of-00004.safetensors",
297
+ "model.transformer.blocks.9.up_proj.weight": "model-00002-of-00004.safetensors",
298
+ "model.transformer.blocks.9.v_proj.weight": "model-00002-of-00004.safetensors",
299
+ "model.transformer.ff_out.weight": "model-00004-of-00004.safetensors",
300
+ "model.transformer.ln_f.weight": "model-00001-of-00004.safetensors",
301
+ "model.transformer.wte.weight": "model-00001-of-00004.safetensors",
302
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00004-of-00004.safetensors",
303
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00004-of-00004.safetensors",
304
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00004-of-00004.safetensors",
305
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00004-of-00004.safetensors",
306
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00004-of-00004.safetensors",
307
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00004-of-00004.safetensors",
308
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00004-of-00004.safetensors",
309
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00004-of-00004.safetensors",
310
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00004-of-00004.safetensors",
311
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00004-of-00004.safetensors",
312
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00004-of-00004.safetensors",
313
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
314
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
315
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
316
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
317
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
318
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
319
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
320
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
321
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00004-of-00004.safetensors",
322
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00004-of-00004.safetensors",
323
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00004-of-00004.safetensors",
324
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00004-of-00004.safetensors",
325
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00004-of-00004.safetensors",
326
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00004-of-00004.safetensors",
327
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00004-of-00004.safetensors",
328
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00004-of-00004.safetensors",
329
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
330
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
331
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
332
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
333
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
334
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
335
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
336
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
337
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00004-of-00004.safetensors",
338
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00004-of-00004.safetensors",
339
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00004-of-00004.safetensors",
340
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00004-of-00004.safetensors",
341
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00004-of-00004.safetensors",
342
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00004-of-00004.safetensors",
343
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00004-of-00004.safetensors",
344
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00004-of-00004.safetensors",
345
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
346
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
347
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
348
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
349
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
350
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
351
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
352
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
353
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00004-of-00004.safetensors",
354
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00004-of-00004.safetensors",
355
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00004-of-00004.safetensors",
356
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00004-of-00004.safetensors",
357
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00004-of-00004.safetensors",
358
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00004-of-00004.safetensors",
359
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00004-of-00004.safetensors",
360
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00004-of-00004.safetensors",
361
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
362
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
363
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
364
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
365
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
366
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00004-of-00004.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00004-of-00004.safetensors"
+ }
+ }
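
As the weight map above shows, every vision-tower tensor resolves to the fourth shard. A minimal sketch of how such a sharded index is typically consumed, assuming the standard Hugging Face layout where the index file is named model.safetensors.index.json and stores the mapping under a "weight_map" key:

import json
from collections import defaultdict

# Load the sharded-checkpoint index (standard HF name; adjust the path
# to wherever this repository was downloaded).
with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]  # tensor name -> shard file name

# Group tensor names by the shard that stores them.
shards = defaultdict(list)
for name, shard in weight_map.items():
    shards[shard].append(name)

for shard in sorted(shards):
    print(f"{shard}: {len(shards[shard])} tensors")
# The vision-tower entries listed above all land in
# model-00004-of-00004.safetensors.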
special_tokens_map.json ADDED
@@ -0,0 +1,38 @@
+ {
+ "additional_special_tokens": [
+ "<role>",
+ "</role>",
+ "<|arithmetic_start|>",
+ "<|arithmetic_end|>",
+ "<|number_start|>",
+ "<|number_end|>"
+ ],
+ "bos_token": {
+ "content": "<|startoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "[CLS]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
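
For reference, a minimal sketch of how these special tokens surface once the tokenizer is loaded; the local path is hypothetical, and transformers' AutoTokenizer with trust_remote_code is assumed:

from transformers import AutoTokenizer

# "./llava-llada-checkpoint" is a hypothetical local clone of this repository.
tok = AutoTokenizer.from_pretrained("./llava-llada-checkpoint", trust_remote_code=True)

# bos/eos/cls come straight from special_tokens_map.json above;
# note that the pad token is the same "<|endoftext|>" string as eos.
print(tok.bos_token, tok.eos_token, tok.cls_token, tok.pad_token)
print(tok.additional_special_tokens)  # <role>, </role>, arithmetic/number markers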
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,2184 @@
+ {
+ "add_bos_token": false,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "126080": {
+ "content": "<|startoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126081": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126082": {
+ "content": "[CLS]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126083": {
+ "content": "[gMASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126084": {
+ "content": "<|reserved_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126085": {
+ "content": "<|reserved_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126086": {
+ "content": "<|reserved_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126087": {
+ "content": "<|reserved_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126088": {
+ "content": "<|reserved_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126089": {
+ "content": "<|reserved_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126090": {
+ "content": "<|reserved_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126091": {
+ "content": "<|reserved_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126092": {
+ "content": "<|reserved_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126093": {
+ "content": "<|reserved_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126094": {
+ "content": "<|reserved_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126095": {
+ "content": "<|reserved_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126096": {
+ "content": "<|reserved_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126097": {
+ "content": "<|reserved_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126098": {
+ "content": "<|reserved_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126099": {
+ "content": "<|reserved_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126100": {
+ "content": "<|reserved_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126101": {
+ "content": "<|reserved_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126102": {
+ "content": "<|reserved_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126103": {
+ "content": "<|reserved_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126104": {
+ "content": "<|reserved_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126105": {
+ "content": "<|reserved_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126106": {
+ "content": "<|reserved_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126107": {
+ "content": "<|reserved_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126108": {
+ "content": "<|reserved_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126109": {
+ "content": "<|reserved_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126110": {
+ "content": "<|reserved_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126111": {
+ "content": "<|reserved_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126112": {
+ "content": "<|reserved_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126113": {
+ "content": "<|reserved_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126114": {
+ "content": "<|reserved_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126115": {
+ "content": "<|reserved_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126116": {
+ "content": "<|reserved_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126117": {
+ "content": "<|reserved_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126118": {
+ "content": "<|reserved_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126119": {
+ "content": "<|reserved_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126120": {
+ "content": "<|reserved_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126121": {
+ "content": "<|reserved_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126122": {
+ "content": "<|reserved_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126123": {
+ "content": "<|reserved_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126124": {
+ "content": "<|reserved_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126125": {
+ "content": "<|reserved_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126126": {
+ "content": "<|reserved_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126127": {
+ "content": "<|reserved_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126128": {
+ "content": "<|reserved_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126129": {
+ "content": "<|reserved_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126130": {
+ "content": "<|reserved_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126131": {
+ "content": "<|reserved_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126132": {
+ "content": "<|reserved_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126133": {
+ "content": "<|reserved_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126134": {
+ "content": "<|reserved_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126135": {
+ "content": "<|reserved_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126136": {
+ "content": "<|reserved_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126137": {
+ "content": "<|reserved_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126138": {
+ "content": "<|reserved_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126139": {
+ "content": "<|reserved_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126140": {
+ "content": "<|reserved_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126141": {
+ "content": "<|reserved_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126142": {
+ "content": "<|reserved_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126143": {
+ "content": "<|reserved_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126144": {
+ "content": "<|reserved_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126145": {
+ "content": "<|reserved_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126146": {
+ "content": "<|reserved_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126147": {
+ "content": "<|reserved_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "126148": {
+ "content": "<|reserved_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
554
+ "single_word": false,
555
+ "special": true
556
+ },
557
+ "126149": {
558
+ "content": "<|reserved_token_65|>",
559
+ "lstrip": false,
560
+ "normalized": false,
561
+ "rstrip": false,
562
+ "single_word": false,
563
+ "special": true
564
+ },
565
+ "126150": {
566
+ "content": "<|reserved_token_66|>",
567
+ "lstrip": false,
568
+ "normalized": false,
569
+ "rstrip": false,
570
+ "single_word": false,
571
+ "special": true
572
+ },
573
+ "126151": {
574
+ "content": "<|reserved_token_67|>",
575
+ "lstrip": false,
576
+ "normalized": false,
577
+ "rstrip": false,
578
+ "single_word": false,
579
+ "special": true
580
+ },
581
+ "126152": {
582
+ "content": "<|reserved_token_68|>",
583
+ "lstrip": false,
584
+ "normalized": false,
585
+ "rstrip": false,
586
+ "single_word": false,
587
+ "special": true
588
+ },
589
+ "126153": {
590
+ "content": "<|reserved_token_69|>",
591
+ "lstrip": false,
592
+ "normalized": false,
593
+ "rstrip": false,
594
+ "single_word": false,
595
+ "special": true
596
+ },
597
+ "126154": {
598
+ "content": "<|reserved_token_70|>",
599
+ "lstrip": false,
600
+ "normalized": false,
601
+ "rstrip": false,
602
+ "single_word": false,
603
+ "special": true
604
+ },
605
+ "126155": {
606
+ "content": "<|reserved_token_71|>",
607
+ "lstrip": false,
608
+ "normalized": false,
609
+ "rstrip": false,
610
+ "single_word": false,
611
+ "special": true
612
+ },
613
+ "126156": {
614
+ "content": "<|reserved_token_72|>",
615
+ "lstrip": false,
616
+ "normalized": false,
617
+ "rstrip": false,
618
+ "single_word": false,
619
+ "special": true
620
+ },
621
+ "126157": {
622
+ "content": "<|reserved_token_73|>",
623
+ "lstrip": false,
624
+ "normalized": false,
625
+ "rstrip": false,
626
+ "single_word": false,
627
+ "special": true
628
+ },
629
+ "126158": {
630
+ "content": "<|reserved_token_74|>",
631
+ "lstrip": false,
632
+ "normalized": false,
633
+ "rstrip": false,
634
+ "single_word": false,
635
+ "special": true
636
+ },
637
+ "126159": {
638
+ "content": "<|reserved_token_75|>",
639
+ "lstrip": false,
640
+ "normalized": false,
641
+ "rstrip": false,
642
+ "single_word": false,
643
+ "special": true
644
+ },
645
+ "126160": {
646
+ "content": "<|reserved_token_76|>",
647
+ "lstrip": false,
648
+ "normalized": false,
649
+ "rstrip": false,
650
+ "single_word": false,
651
+ "special": true
652
+ },
653
+ "126161": {
654
+ "content": "<|reserved_token_77|>",
655
+ "lstrip": false,
656
+ "normalized": false,
657
+ "rstrip": false,
658
+ "single_word": false,
659
+ "special": true
660
+ },
661
+ "126162": {
662
+ "content": "<|reserved_token_78|>",
663
+ "lstrip": false,
664
+ "normalized": false,
665
+ "rstrip": false,
666
+ "single_word": false,
667
+ "special": true
668
+ },
669
+ "126163": {
670
+ "content": "<|reserved_token_79|>",
671
+ "lstrip": false,
672
+ "normalized": false,
673
+ "rstrip": false,
674
+ "single_word": false,
675
+ "special": true
676
+ },
677
+ "126164": {
678
+ "content": "<|reserved_token_80|>",
679
+ "lstrip": false,
680
+ "normalized": false,
681
+ "rstrip": false,
682
+ "single_word": false,
683
+ "special": true
684
+ },
685
+ "126165": {
686
+ "content": "<|reserved_token_81|>",
687
+ "lstrip": false,
688
+ "normalized": false,
689
+ "rstrip": false,
690
+ "single_word": false,
691
+ "special": true
692
+ },
693
+ "126166": {
694
+ "content": "<|reserved_token_82|>",
695
+ "lstrip": false,
696
+ "normalized": false,
697
+ "rstrip": false,
698
+ "single_word": false,
699
+ "special": true
700
+ },
701
+ "126167": {
702
+ "content": "<|reserved_token_83|>",
703
+ "lstrip": false,
704
+ "normalized": false,
705
+ "rstrip": false,
706
+ "single_word": false,
707
+ "special": true
708
+ },
709
+ "126168": {
710
+ "content": "<|reserved_token_84|>",
711
+ "lstrip": false,
712
+ "normalized": false,
713
+ "rstrip": false,
714
+ "single_word": false,
715
+ "special": true
716
+ },
717
+ "126169": {
718
+ "content": "<|reserved_token_85|>",
719
+ "lstrip": false,
720
+ "normalized": false,
721
+ "rstrip": false,
722
+ "single_word": false,
723
+ "special": true
724
+ },
725
+ "126170": {
726
+ "content": "<|reserved_token_86|>",
727
+ "lstrip": false,
728
+ "normalized": false,
729
+ "rstrip": false,
730
+ "single_word": false,
731
+ "special": true
732
+ },
733
+ "126171": {
734
+ "content": "<|reserved_token_87|>",
735
+ "lstrip": false,
736
+ "normalized": false,
737
+ "rstrip": false,
738
+ "single_word": false,
739
+ "special": true
740
+ },
741
+ "126172": {
742
+ "content": "<|reserved_token_88|>",
743
+ "lstrip": false,
744
+ "normalized": false,
745
+ "rstrip": false,
746
+ "single_word": false,
747
+ "special": true
748
+ },
749
+ "126173": {
750
+ "content": "<|reserved_token_89|>",
751
+ "lstrip": false,
752
+ "normalized": false,
753
+ "rstrip": false,
754
+ "single_word": false,
755
+ "special": true
756
+ },
757
+ "126174": {
758
+ "content": "<|reserved_token_90|>",
759
+ "lstrip": false,
760
+ "normalized": false,
761
+ "rstrip": false,
762
+ "single_word": false,
763
+ "special": true
764
+ },
765
+ "126175": {
766
+ "content": "<|reserved_token_91|>",
767
+ "lstrip": false,
768
+ "normalized": false,
769
+ "rstrip": false,
770
+ "single_word": false,
771
+ "special": true
772
+ },
773
+ "126176": {
774
+ "content": "<|reserved_token_92|>",
775
+ "lstrip": false,
776
+ "normalized": false,
777
+ "rstrip": false,
778
+ "single_word": false,
779
+ "special": true
780
+ },
781
+ "126177": {
782
+ "content": "<|reserved_token_93|>",
783
+ "lstrip": false,
784
+ "normalized": false,
785
+ "rstrip": false,
786
+ "single_word": false,
787
+ "special": true
788
+ },
789
+ "126178": {
790
+ "content": "<|reserved_token_94|>",
791
+ "lstrip": false,
792
+ "normalized": false,
793
+ "rstrip": false,
794
+ "single_word": false,
795
+ "special": true
796
+ },
797
+ "126179": {
798
+ "content": "<|reserved_token_95|>",
799
+ "lstrip": false,
800
+ "normalized": false,
801
+ "rstrip": false,
802
+ "single_word": false,
803
+ "special": true
804
+ },
805
+ "126180": {
806
+ "content": "<|reserved_token_96|>",
807
+ "lstrip": false,
808
+ "normalized": false,
809
+ "rstrip": false,
810
+ "single_word": false,
811
+ "special": true
812
+ },
813
+ "126181": {
814
+ "content": "<|reserved_token_97|>",
815
+ "lstrip": false,
816
+ "normalized": false,
817
+ "rstrip": false,
818
+ "single_word": false,
819
+ "special": true
820
+ },
821
+ "126182": {
822
+ "content": "<|reserved_token_98|>",
823
+ "lstrip": false,
824
+ "normalized": false,
825
+ "rstrip": false,
826
+ "single_word": false,
827
+ "special": true
828
+ },
829
+ "126183": {
830
+ "content": "<|reserved_token_99|>",
831
+ "lstrip": false,
832
+ "normalized": false,
833
+ "rstrip": false,
834
+ "single_word": false,
835
+ "special": true
836
+ },
837
+ "126184": {
838
+ "content": "<|reserved_token_100|>",
839
+ "lstrip": false,
840
+ "normalized": false,
841
+ "rstrip": false,
842
+ "single_word": false,
843
+ "special": true
844
+ },
845
+ "126185": {
846
+ "content": "<|reserved_token_101|>",
847
+ "lstrip": false,
848
+ "normalized": false,
849
+ "rstrip": false,
850
+ "single_word": false,
851
+ "special": true
852
+ },
853
+ "126186": {
854
+ "content": "<|reserved_token_102|>",
855
+ "lstrip": false,
856
+ "normalized": false,
857
+ "rstrip": false,
858
+ "single_word": false,
859
+ "special": true
860
+ },
861
+ "126187": {
862
+ "content": "<|reserved_token_103|>",
863
+ "lstrip": false,
864
+ "normalized": false,
865
+ "rstrip": false,
866
+ "single_word": false,
867
+ "special": true
868
+ },
869
+ "126188": {
870
+ "content": "<|reserved_token_104|>",
871
+ "lstrip": false,
872
+ "normalized": false,
873
+ "rstrip": false,
874
+ "single_word": false,
875
+ "special": true
876
+ },
877
+ "126189": {
878
+ "content": "<|reserved_token_105|>",
879
+ "lstrip": false,
880
+ "normalized": false,
881
+ "rstrip": false,
882
+ "single_word": false,
883
+ "special": true
884
+ },
885
+ "126190": {
886
+ "content": "<|reserved_token_106|>",
887
+ "lstrip": false,
888
+ "normalized": false,
889
+ "rstrip": false,
890
+ "single_word": false,
891
+ "special": true
892
+ },
893
+ "126191": {
894
+ "content": "<|reserved_token_107|>",
895
+ "lstrip": false,
896
+ "normalized": false,
897
+ "rstrip": false,
898
+ "single_word": false,
899
+ "special": true
900
+ },
901
+ "126192": {
902
+ "content": "<|reserved_token_108|>",
903
+ "lstrip": false,
904
+ "normalized": false,
905
+ "rstrip": false,
906
+ "single_word": false,
907
+ "special": true
908
+ },
909
+ "126193": {
910
+ "content": "<|reserved_token_109|>",
911
+ "lstrip": false,
912
+ "normalized": false,
913
+ "rstrip": false,
914
+ "single_word": false,
915
+ "special": true
916
+ },
917
+ "126194": {
918
+ "content": "<|reserved_token_110|>",
919
+ "lstrip": false,
920
+ "normalized": false,
921
+ "rstrip": false,
922
+ "single_word": false,
923
+ "special": true
924
+ },
925
+ "126195": {
926
+ "content": "<|reserved_token_111|>",
927
+ "lstrip": false,
928
+ "normalized": false,
929
+ "rstrip": false,
930
+ "single_word": false,
931
+ "special": true
932
+ },
933
+ "126196": {
934
+ "content": "<|reserved_token_112|>",
935
+ "lstrip": false,
936
+ "normalized": false,
937
+ "rstrip": false,
938
+ "single_word": false,
939
+ "special": true
940
+ },
941
+ "126197": {
942
+ "content": "<|reserved_token_113|>",
943
+ "lstrip": false,
944
+ "normalized": false,
945
+ "rstrip": false,
946
+ "single_word": false,
947
+ "special": true
948
+ },
949
+ "126198": {
950
+ "content": "<|reserved_token_114|>",
951
+ "lstrip": false,
952
+ "normalized": false,
953
+ "rstrip": false,
954
+ "single_word": false,
955
+ "special": true
956
+ },
957
+ "126199": {
958
+ "content": "<|reserved_token_115|>",
959
+ "lstrip": false,
960
+ "normalized": false,
961
+ "rstrip": false,
962
+ "single_word": false,
963
+ "special": true
964
+ },
965
+ "126200": {
966
+ "content": "<|reserved_token_116|>",
967
+ "lstrip": false,
968
+ "normalized": false,
969
+ "rstrip": false,
970
+ "single_word": false,
971
+ "special": true
972
+ },
973
+ "126201": {
974
+ "content": "<|reserved_token_117|>",
975
+ "lstrip": false,
976
+ "normalized": false,
977
+ "rstrip": false,
978
+ "single_word": false,
979
+ "special": true
980
+ },
981
+ "126202": {
982
+ "content": "<|reserved_token_118|>",
983
+ "lstrip": false,
984
+ "normalized": false,
985
+ "rstrip": false,
986
+ "single_word": false,
987
+ "special": true
988
+ },
989
+ "126203": {
990
+ "content": "<|reserved_token_119|>",
991
+ "lstrip": false,
992
+ "normalized": false,
993
+ "rstrip": false,
994
+ "single_word": false,
995
+ "special": true
996
+ },
997
+ "126204": {
998
+ "content": "<|reserved_token_120|>",
999
+ "lstrip": false,
1000
+ "normalized": false,
1001
+ "rstrip": false,
1002
+ "single_word": false,
1003
+ "special": true
1004
+ },
1005
+ "126205": {
1006
+ "content": "<|reserved_token_121|>",
1007
+ "lstrip": false,
1008
+ "normalized": false,
1009
+ "rstrip": false,
1010
+ "single_word": false,
1011
+ "special": true
1012
+ },
1013
+ "126206": {
1014
+ "content": "<|reserved_token_122|>",
1015
+ "lstrip": false,
1016
+ "normalized": false,
1017
+ "rstrip": false,
1018
+ "single_word": false,
1019
+ "special": true
1020
+ },
1021
+ "126207": {
1022
+ "content": "<|reserved_token_123|>",
1023
+ "lstrip": false,
1024
+ "normalized": false,
1025
+ "rstrip": false,
1026
+ "single_word": false,
1027
+ "special": true
1028
+ },
1029
+ "126208": {
1030
+ "content": "<|reserved_token_124|>",
1031
+ "lstrip": false,
1032
+ "normalized": false,
1033
+ "rstrip": false,
1034
+ "single_word": false,
1035
+ "special": true
1036
+ },
1037
+ "126209": {
1038
+ "content": "<|reserved_token_125|>",
1039
+ "lstrip": false,
1040
+ "normalized": false,
1041
+ "rstrip": false,
1042
+ "single_word": false,
1043
+ "special": true
1044
+ },
1045
+ "126210": {
1046
+ "content": "<|reserved_token_126|>",
1047
+ "lstrip": false,
1048
+ "normalized": false,
1049
+ "rstrip": false,
1050
+ "single_word": false,
1051
+ "special": true
1052
+ },
1053
+ "126211": {
1054
+ "content": "<|reserved_token_127|>",
1055
+ "lstrip": false,
1056
+ "normalized": false,
1057
+ "rstrip": false,
1058
+ "single_word": false,
1059
+ "special": true
1060
+ },
1061
+ "126212": {
1062
+ "content": "<|reserved_token_128|>",
1063
+ "lstrip": false,
1064
+ "normalized": false,
1065
+ "rstrip": false,
1066
+ "single_word": false,
1067
+ "special": true
1068
+ },
1069
+ "126213": {
1070
+ "content": "<|reserved_token_129|>",
1071
+ "lstrip": false,
1072
+ "normalized": false,
1073
+ "rstrip": false,
1074
+ "single_word": false,
1075
+ "special": true
1076
+ },
1077
+ "126214": {
1078
+ "content": "<|reserved_token_130|>",
1079
+ "lstrip": false,
1080
+ "normalized": false,
1081
+ "rstrip": false,
1082
+ "single_word": false,
1083
+ "special": true
1084
+ },
1085
+ "126215": {
1086
+ "content": "<|reserved_token_131|>",
1087
+ "lstrip": false,
1088
+ "normalized": false,
1089
+ "rstrip": false,
1090
+ "single_word": false,
1091
+ "special": true
1092
+ },
1093
+ "126216": {
1094
+ "content": "<|reserved_token_132|>",
1095
+ "lstrip": false,
1096
+ "normalized": false,
1097
+ "rstrip": false,
1098
+ "single_word": false,
1099
+ "special": true
1100
+ },
1101
+ "126217": {
1102
+ "content": "<|reserved_token_133|>",
1103
+ "lstrip": false,
1104
+ "normalized": false,
1105
+ "rstrip": false,
1106
+ "single_word": false,
1107
+ "special": true
1108
+ },
1109
+ "126218": {
1110
+ "content": "<|reserved_token_134|>",
1111
+ "lstrip": false,
1112
+ "normalized": false,
1113
+ "rstrip": false,
1114
+ "single_word": false,
1115
+ "special": true
1116
+ },
1117
+ "126219": {
1118
+ "content": "<|reserved_token_135|>",
1119
+ "lstrip": false,
1120
+ "normalized": false,
1121
+ "rstrip": false,
1122
+ "single_word": false,
1123
+ "special": true
1124
+ },
1125
+ "126220": {
1126
+ "content": "<|reserved_token_136|>",
1127
+ "lstrip": false,
1128
+ "normalized": false,
1129
+ "rstrip": false,
1130
+ "single_word": false,
1131
+ "special": true
1132
+ },
1133
+ "126221": {
1134
+ "content": "<|reserved_token_137|>",
1135
+ "lstrip": false,
1136
+ "normalized": false,
1137
+ "rstrip": false,
1138
+ "single_word": false,
1139
+ "special": true
1140
+ },
1141
+ "126222": {
1142
+ "content": "<|reserved_token_138|>",
1143
+ "lstrip": false,
1144
+ "normalized": false,
1145
+ "rstrip": false,
1146
+ "single_word": false,
1147
+ "special": true
1148
+ },
1149
+ "126223": {
1150
+ "content": "<|reserved_token_139|>",
1151
+ "lstrip": false,
1152
+ "normalized": false,
1153
+ "rstrip": false,
1154
+ "single_word": false,
1155
+ "special": true
1156
+ },
1157
+ "126224": {
1158
+ "content": "<|reserved_token_140|>",
1159
+ "lstrip": false,
1160
+ "normalized": false,
1161
+ "rstrip": false,
1162
+ "single_word": false,
1163
+ "special": true
1164
+ },
1165
+ "126225": {
1166
+ "content": "<|reserved_token_141|>",
1167
+ "lstrip": false,
1168
+ "normalized": false,
1169
+ "rstrip": false,
1170
+ "single_word": false,
1171
+ "special": true
1172
+ },
1173
+ "126226": {
1174
+ "content": "<|reserved_token_142|>",
1175
+ "lstrip": false,
1176
+ "normalized": false,
1177
+ "rstrip": false,
1178
+ "single_word": false,
1179
+ "special": true
1180
+ },
1181
+ "126227": {
1182
+ "content": "<|reserved_token_143|>",
1183
+ "lstrip": false,
1184
+ "normalized": false,
1185
+ "rstrip": false,
1186
+ "single_word": false,
1187
+ "special": true
1188
+ },
1189
+ "126228": {
1190
+ "content": "<|reserved_token_144|>",
1191
+ "lstrip": false,
1192
+ "normalized": false,
1193
+ "rstrip": false,
1194
+ "single_word": false,
1195
+ "special": true
1196
+ },
1197
+ "126229": {
1198
+ "content": "<|reserved_token_145|>",
1199
+ "lstrip": false,
1200
+ "normalized": false,
1201
+ "rstrip": false,
1202
+ "single_word": false,
1203
+ "special": true
1204
+ },
1205
+ "126230": {
1206
+ "content": "<|reserved_token_146|>",
1207
+ "lstrip": false,
1208
+ "normalized": false,
1209
+ "rstrip": false,
1210
+ "single_word": false,
1211
+ "special": true
1212
+ },
1213
+ "126231": {
1214
+ "content": "<|reserved_token_147|>",
1215
+ "lstrip": false,
1216
+ "normalized": false,
1217
+ "rstrip": false,
1218
+ "single_word": false,
1219
+ "special": true
1220
+ },
1221
+ "126232": {
1222
+ "content": "<|reserved_token_148|>",
1223
+ "lstrip": false,
1224
+ "normalized": false,
1225
+ "rstrip": false,
1226
+ "single_word": false,
1227
+ "special": true
1228
+ },
1229
+ "126233": {
1230
+ "content": "<|reserved_token_149|>",
1231
+ "lstrip": false,
1232
+ "normalized": false,
1233
+ "rstrip": false,
1234
+ "single_word": false,
1235
+ "special": true
1236
+ },
1237
+ "126234": {
1238
+ "content": "<|reserved_token_150|>",
1239
+ "lstrip": false,
1240
+ "normalized": false,
1241
+ "rstrip": false,
1242
+ "single_word": false,
1243
+ "special": true
1244
+ },
1245
+ "126235": {
1246
+ "content": "<|reserved_token_151|>",
1247
+ "lstrip": false,
1248
+ "normalized": false,
1249
+ "rstrip": false,
1250
+ "single_word": false,
1251
+ "special": true
1252
+ },
1253
+ "126236": {
1254
+ "content": "<|reserved_token_152|>",
1255
+ "lstrip": false,
1256
+ "normalized": false,
1257
+ "rstrip": false,
1258
+ "single_word": false,
1259
+ "special": true
1260
+ },
1261
+ "126237": {
1262
+ "content": "<|reserved_token_153|>",
1263
+ "lstrip": false,
1264
+ "normalized": false,
1265
+ "rstrip": false,
1266
+ "single_word": false,
1267
+ "special": true
1268
+ },
1269
+ "126238": {
1270
+ "content": "<|reserved_token_154|>",
1271
+ "lstrip": false,
1272
+ "normalized": false,
1273
+ "rstrip": false,
1274
+ "single_word": false,
1275
+ "special": true
1276
+ },
1277
+ "126239": {
1278
+ "content": "<|reserved_token_155|>",
1279
+ "lstrip": false,
1280
+ "normalized": false,
1281
+ "rstrip": false,
1282
+ "single_word": false,
1283
+ "special": true
1284
+ },
1285
+ "126240": {
1286
+ "content": "<|reserved_token_156|>",
1287
+ "lstrip": false,
1288
+ "normalized": false,
1289
+ "rstrip": false,
1290
+ "single_word": false,
1291
+ "special": true
1292
+ },
1293
+ "126241": {
1294
+ "content": "<|reserved_token_157|>",
1295
+ "lstrip": false,
1296
+ "normalized": false,
1297
+ "rstrip": false,
1298
+ "single_word": false,
1299
+ "special": true
1300
+ },
1301
+ "126242": {
1302
+ "content": "<|reserved_token_158|>",
1303
+ "lstrip": false,
1304
+ "normalized": false,
1305
+ "rstrip": false,
1306
+ "single_word": false,
1307
+ "special": true
1308
+ },
1309
+ "126243": {
1310
+ "content": "<|reserved_token_159|>",
1311
+ "lstrip": false,
1312
+ "normalized": false,
1313
+ "rstrip": false,
1314
+ "single_word": false,
1315
+ "special": true
1316
+ },
1317
+ "126244": {
1318
+ "content": "<|reserved_token_160|>",
1319
+ "lstrip": false,
1320
+ "normalized": false,
1321
+ "rstrip": false,
1322
+ "single_word": false,
1323
+ "special": true
1324
+ },
1325
+ "126245": {
1326
+ "content": "<|reserved_token_161|>",
1327
+ "lstrip": false,
1328
+ "normalized": false,
1329
+ "rstrip": false,
1330
+ "single_word": false,
1331
+ "special": true
1332
+ },
1333
+ "126246": {
1334
+ "content": "<|reserved_token_162|>",
1335
+ "lstrip": false,
1336
+ "normalized": false,
1337
+ "rstrip": false,
1338
+ "single_word": false,
1339
+ "special": true
1340
+ },
1341
+ "126247": {
1342
+ "content": "<|reserved_token_163|>",
1343
+ "lstrip": false,
1344
+ "normalized": false,
1345
+ "rstrip": false,
1346
+ "single_word": false,
1347
+ "special": true
1348
+ },
1349
+ "126248": {
1350
+ "content": "<|reserved_token_164|>",
1351
+ "lstrip": false,
1352
+ "normalized": false,
1353
+ "rstrip": false,
1354
+ "single_word": false,
1355
+ "special": true
1356
+ },
1357
+ "126249": {
1358
+ "content": "<|reserved_token_165|>",
1359
+ "lstrip": false,
1360
+ "normalized": false,
1361
+ "rstrip": false,
1362
+ "single_word": false,
1363
+ "special": true
1364
+ },
1365
+ "126250": {
1366
+ "content": "<|reserved_token_166|>",
1367
+ "lstrip": false,
1368
+ "normalized": false,
1369
+ "rstrip": false,
1370
+ "single_word": false,
1371
+ "special": true
1372
+ },
1373
+ "126251": {
1374
+ "content": "<|reserved_token_167|>",
1375
+ "lstrip": false,
1376
+ "normalized": false,
1377
+ "rstrip": false,
1378
+ "single_word": false,
1379
+ "special": true
1380
+ },
1381
+ "126252": {
1382
+ "content": "<|reserved_token_168|>",
1383
+ "lstrip": false,
1384
+ "normalized": false,
1385
+ "rstrip": false,
1386
+ "single_word": false,
1387
+ "special": true
1388
+ },
1389
+ "126253": {
1390
+ "content": "<|reserved_token_169|>",
1391
+ "lstrip": false,
1392
+ "normalized": false,
1393
+ "rstrip": false,
1394
+ "single_word": false,
1395
+ "special": true
1396
+ },
1397
+ "126254": {
1398
+ "content": "<|reserved_token_170|>",
1399
+ "lstrip": false,
1400
+ "normalized": false,
1401
+ "rstrip": false,
1402
+ "single_word": false,
1403
+ "special": true
1404
+ },
1405
+ "126255": {
1406
+ "content": "<|reserved_token_171|>",
1407
+ "lstrip": false,
1408
+ "normalized": false,
1409
+ "rstrip": false,
1410
+ "single_word": false,
1411
+ "special": true
1412
+ },
1413
+ "126256": {
1414
+ "content": "<|reserved_token_172|>",
1415
+ "lstrip": false,
1416
+ "normalized": false,
1417
+ "rstrip": false,
1418
+ "single_word": false,
1419
+ "special": true
1420
+ },
1421
+ "126257": {
1422
+ "content": "<|reserved_token_173|>",
1423
+ "lstrip": false,
1424
+ "normalized": false,
1425
+ "rstrip": false,
1426
+ "single_word": false,
1427
+ "special": true
1428
+ },
1429
+ "126258": {
1430
+ "content": "<|reserved_token_174|>",
1431
+ "lstrip": false,
1432
+ "normalized": false,
1433
+ "rstrip": false,
1434
+ "single_word": false,
1435
+ "special": true
1436
+ },
1437
+ "126259": {
1438
+ "content": "<|reserved_token_175|>",
1439
+ "lstrip": false,
1440
+ "normalized": false,
1441
+ "rstrip": false,
1442
+ "single_word": false,
1443
+ "special": true
1444
+ },
1445
+ "126260": {
1446
+ "content": "<|reserved_token_176|>",
1447
+ "lstrip": false,
1448
+ "normalized": false,
1449
+ "rstrip": false,
1450
+ "single_word": false,
1451
+ "special": true
1452
+ },
1453
+ "126261": {
1454
+ "content": "<|reserved_token_177|>",
1455
+ "lstrip": false,
1456
+ "normalized": false,
1457
+ "rstrip": false,
1458
+ "single_word": false,
1459
+ "special": true
1460
+ },
1461
+ "126262": {
1462
+ "content": "<|reserved_token_178|>",
1463
+ "lstrip": false,
1464
+ "normalized": false,
1465
+ "rstrip": false,
1466
+ "single_word": false,
1467
+ "special": true
1468
+ },
1469
+ "126263": {
1470
+ "content": "<|reserved_token_179|>",
1471
+ "lstrip": false,
1472
+ "normalized": false,
1473
+ "rstrip": false,
1474
+ "single_word": false,
1475
+ "special": true
1476
+ },
1477
+ "126264": {
1478
+ "content": "<|reserved_token_180|>",
1479
+ "lstrip": false,
1480
+ "normalized": false,
1481
+ "rstrip": false,
1482
+ "single_word": false,
1483
+ "special": true
1484
+ },
1485
+ "126265": {
1486
+ "content": "<|reserved_token_181|>",
1487
+ "lstrip": false,
1488
+ "normalized": false,
1489
+ "rstrip": false,
1490
+ "single_word": false,
1491
+ "special": true
1492
+ },
1493
+ "126266": {
1494
+ "content": "<|reserved_token_182|>",
1495
+ "lstrip": false,
1496
+ "normalized": false,
1497
+ "rstrip": false,
1498
+ "single_word": false,
1499
+ "special": true
1500
+ },
1501
+ "126267": {
1502
+ "content": "<|reserved_token_183|>",
1503
+ "lstrip": false,
1504
+ "normalized": false,
1505
+ "rstrip": false,
1506
+ "single_word": false,
1507
+ "special": true
1508
+ },
1509
+ "126268": {
1510
+ "content": "<|reserved_token_184|>",
1511
+ "lstrip": false,
1512
+ "normalized": false,
1513
+ "rstrip": false,
1514
+ "single_word": false,
1515
+ "special": true
1516
+ },
1517
+ "126269": {
1518
+ "content": "<|reserved_token_185|>",
1519
+ "lstrip": false,
1520
+ "normalized": false,
1521
+ "rstrip": false,
1522
+ "single_word": false,
1523
+ "special": true
1524
+ },
1525
+ "126270": {
1526
+ "content": "<|reserved_token_186|>",
1527
+ "lstrip": false,
1528
+ "normalized": false,
1529
+ "rstrip": false,
1530
+ "single_word": false,
1531
+ "special": true
1532
+ },
1533
+ "126271": {
1534
+ "content": "<|reserved_token_187|>",
1535
+ "lstrip": false,
1536
+ "normalized": false,
1537
+ "rstrip": false,
1538
+ "single_word": false,
1539
+ "special": true
1540
+ },
1541
+ "126272": {
1542
+ "content": "<|reserved_token_188|>",
1543
+ "lstrip": false,
1544
+ "normalized": false,
1545
+ "rstrip": false,
1546
+ "single_word": false,
1547
+ "special": true
1548
+ },
1549
+ "126273": {
1550
+ "content": "<|reserved_token_189|>",
1551
+ "lstrip": false,
1552
+ "normalized": false,
1553
+ "rstrip": false,
1554
+ "single_word": false,
1555
+ "special": true
1556
+ },
1557
+ "126274": {
1558
+ "content": "<|reserved_token_190|>",
1559
+ "lstrip": false,
1560
+ "normalized": false,
1561
+ "rstrip": false,
1562
+ "single_word": false,
1563
+ "special": true
1564
+ },
1565
+ "126275": {
1566
+ "content": "<|reserved_token_191|>",
1567
+ "lstrip": false,
1568
+ "normalized": false,
1569
+ "rstrip": false,
1570
+ "single_word": false,
1571
+ "special": true
1572
+ },
1573
+ "126276": {
1574
+ "content": "<|reserved_token_192|>",
1575
+ "lstrip": false,
1576
+ "normalized": false,
1577
+ "rstrip": false,
1578
+ "single_word": false,
1579
+ "special": true
1580
+ },
1581
+ "126277": {
1582
+ "content": "<|reserved_token_193|>",
1583
+ "lstrip": false,
1584
+ "normalized": false,
1585
+ "rstrip": false,
1586
+ "single_word": false,
1587
+ "special": true
1588
+ },
1589
+ "126278": {
1590
+ "content": "<|reserved_token_194|>",
1591
+ "lstrip": false,
1592
+ "normalized": false,
1593
+ "rstrip": false,
1594
+ "single_word": false,
1595
+ "special": true
1596
+ },
1597
+ "126279": {
1598
+ "content": "<|reserved_token_195|>",
1599
+ "lstrip": false,
1600
+ "normalized": false,
1601
+ "rstrip": false,
1602
+ "single_word": false,
1603
+ "special": true
1604
+ },
1605
+ "126280": {
1606
+ "content": "<|reserved_token_196|>",
1607
+ "lstrip": false,
1608
+ "normalized": false,
1609
+ "rstrip": false,
1610
+ "single_word": false,
1611
+ "special": true
1612
+ },
1613
+ "126281": {
1614
+ "content": "<|reserved_token_197|>",
1615
+ "lstrip": false,
1616
+ "normalized": false,
1617
+ "rstrip": false,
1618
+ "single_word": false,
1619
+ "special": true
1620
+ },
1621
+ "126282": {
1622
+ "content": "<|reserved_token_198|>",
1623
+ "lstrip": false,
1624
+ "normalized": false,
1625
+ "rstrip": false,
1626
+ "single_word": false,
1627
+ "special": true
1628
+ },
1629
+ "126283": {
1630
+ "content": "<|reserved_token_199|>",
1631
+ "lstrip": false,
1632
+ "normalized": false,
1633
+ "rstrip": false,
1634
+ "single_word": false,
1635
+ "special": true
1636
+ },
1637
+ "126284": {
1638
+ "content": "<|reserved_token_200|>",
1639
+ "lstrip": false,
1640
+ "normalized": false,
1641
+ "rstrip": false,
1642
+ "single_word": false,
1643
+ "special": true
1644
+ },
1645
+ "126285": {
1646
+ "content": "<|reserved_token_201|>",
1647
+ "lstrip": false,
1648
+ "normalized": false,
1649
+ "rstrip": false,
1650
+ "single_word": false,
1651
+ "special": true
1652
+ },
1653
+ "126286": {
1654
+ "content": "<|reserved_token_202|>",
1655
+ "lstrip": false,
1656
+ "normalized": false,
1657
+ "rstrip": false,
1658
+ "single_word": false,
1659
+ "special": true
1660
+ },
1661
+ "126287": {
1662
+ "content": "<|reserved_token_203|>",
1663
+ "lstrip": false,
1664
+ "normalized": false,
1665
+ "rstrip": false,
1666
+ "single_word": false,
1667
+ "special": true
1668
+ },
1669
+ "126288": {
1670
+ "content": "<|reserved_token_204|>",
1671
+ "lstrip": false,
1672
+ "normalized": false,
1673
+ "rstrip": false,
1674
+ "single_word": false,
1675
+ "special": true
1676
+ },
1677
+ "126289": {
1678
+ "content": "<|reserved_token_205|>",
1679
+ "lstrip": false,
1680
+ "normalized": false,
1681
+ "rstrip": false,
1682
+ "single_word": false,
1683
+ "special": true
1684
+ },
1685
+ "126290": {
1686
+ "content": "<|reserved_token_206|>",
1687
+ "lstrip": false,
1688
+ "normalized": false,
1689
+ "rstrip": false,
1690
+ "single_word": false,
1691
+ "special": true
1692
+ },
1693
+ "126291": {
1694
+ "content": "<|reserved_token_207|>",
1695
+ "lstrip": false,
1696
+ "normalized": false,
1697
+ "rstrip": false,
1698
+ "single_word": false,
1699
+ "special": true
1700
+ },
1701
+ "126292": {
1702
+ "content": "<|reserved_token_208|>",
1703
+ "lstrip": false,
1704
+ "normalized": false,
1705
+ "rstrip": false,
1706
+ "single_word": false,
1707
+ "special": true
1708
+ },
1709
+ "126293": {
1710
+ "content": "<|reserved_token_209|>",
1711
+ "lstrip": false,
1712
+ "normalized": false,
1713
+ "rstrip": false,
1714
+ "single_word": false,
1715
+ "special": true
1716
+ },
1717
+ "126294": {
1718
+ "content": "<|reserved_token_210|>",
1719
+ "lstrip": false,
1720
+ "normalized": false,
1721
+ "rstrip": false,
1722
+ "single_word": false,
1723
+ "special": true
1724
+ },
1725
+ "126295": {
1726
+ "content": "<|reserved_token_211|>",
1727
+ "lstrip": false,
1728
+ "normalized": false,
1729
+ "rstrip": false,
1730
+ "single_word": false,
1731
+ "special": true
1732
+ },
1733
+ "126296": {
1734
+ "content": "<|reserved_token_212|>",
1735
+ "lstrip": false,
1736
+ "normalized": false,
1737
+ "rstrip": false,
1738
+ "single_word": false,
1739
+ "special": true
1740
+ },
1741
+ "126297": {
1742
+ "content": "<|reserved_token_213|>",
1743
+ "lstrip": false,
1744
+ "normalized": false,
1745
+ "rstrip": false,
1746
+ "single_word": false,
1747
+ "special": true
1748
+ },
1749
+ "126298": {
1750
+ "content": "<|reserved_token_214|>",
1751
+ "lstrip": false,
1752
+ "normalized": false,
1753
+ "rstrip": false,
1754
+ "single_word": false,
1755
+ "special": true
1756
+ },
1757
+ "126299": {
1758
+ "content": "<|reserved_token_215|>",
1759
+ "lstrip": false,
1760
+ "normalized": false,
1761
+ "rstrip": false,
1762
+ "single_word": false,
1763
+ "special": true
1764
+ },
1765
+ "126300": {
1766
+ "content": "<|reserved_token_216|>",
1767
+ "lstrip": false,
1768
+ "normalized": false,
1769
+ "rstrip": false,
1770
+ "single_word": false,
1771
+ "special": true
1772
+ },
1773
+ "126301": {
1774
+ "content": "<|reserved_token_217|>",
1775
+ "lstrip": false,
1776
+ "normalized": false,
1777
+ "rstrip": false,
1778
+ "single_word": false,
1779
+ "special": true
1780
+ },
1781
+ "126302": {
1782
+ "content": "<|reserved_token_218|>",
1783
+ "lstrip": false,
1784
+ "normalized": false,
1785
+ "rstrip": false,
1786
+ "single_word": false,
1787
+ "special": true
1788
+ },
1789
+ "126303": {
1790
+ "content": "<|reserved_token_219|>",
1791
+ "lstrip": false,
1792
+ "normalized": false,
1793
+ "rstrip": false,
1794
+ "single_word": false,
1795
+ "special": true
1796
+ },
1797
+ "126304": {
1798
+ "content": "<|reserved_token_220|>",
1799
+ "lstrip": false,
1800
+ "normalized": false,
1801
+ "rstrip": false,
1802
+ "single_word": false,
1803
+ "special": true
1804
+ },
1805
+ "126305": {
1806
+ "content": "<|reserved_token_221|>",
1807
+ "lstrip": false,
1808
+ "normalized": false,
1809
+ "rstrip": false,
1810
+ "single_word": false,
1811
+ "special": true
1812
+ },
1813
+ "126306": {
1814
+ "content": "<|reserved_token_222|>",
1815
+ "lstrip": false,
1816
+ "normalized": false,
1817
+ "rstrip": false,
1818
+ "single_word": false,
1819
+ "special": true
1820
+ },
1821
+ "126307": {
1822
+ "content": "<|reserved_token_223|>",
1823
+ "lstrip": false,
1824
+ "normalized": false,
1825
+ "rstrip": false,
1826
+ "single_word": false,
1827
+ "special": true
1828
+ },
1829
+ "126308": {
1830
+ "content": "<|reserved_token_224|>",
1831
+ "lstrip": false,
1832
+ "normalized": false,
1833
+ "rstrip": false,
1834
+ "single_word": false,
1835
+ "special": true
1836
+ },
1837
+ "126309": {
1838
+ "content": "<|reserved_token_225|>",
1839
+ "lstrip": false,
1840
+ "normalized": false,
1841
+ "rstrip": false,
1842
+ "single_word": false,
1843
+ "special": true
1844
+ },
1845
+ "126310": {
1846
+ "content": "<|reserved_token_226|>",
1847
+ "lstrip": false,
1848
+ "normalized": false,
1849
+ "rstrip": false,
1850
+ "single_word": false,
1851
+ "special": true
1852
+ },
1853
+ "126311": {
1854
+ "content": "<|reserved_token_227|>",
1855
+ "lstrip": false,
1856
+ "normalized": false,
1857
+ "rstrip": false,
1858
+ "single_word": false,
1859
+ "special": true
1860
+ },
1861
+ "126312": {
1862
+ "content": "<|reserved_token_228|>",
1863
+ "lstrip": false,
1864
+ "normalized": false,
1865
+ "rstrip": false,
1866
+ "single_word": false,
1867
+ "special": true
1868
+ },
1869
+ "126313": {
1870
+ "content": "<|reserved_token_229|>",
1871
+ "lstrip": false,
1872
+ "normalized": false,
1873
+ "rstrip": false,
1874
+ "single_word": false,
1875
+ "special": true
1876
+ },
1877
+ "126314": {
1878
+ "content": "<|reserved_token_230|>",
1879
+ "lstrip": false,
1880
+ "normalized": false,
1881
+ "rstrip": false,
1882
+ "single_word": false,
1883
+ "special": true
1884
+ },
1885
+ "126315": {
1886
+ "content": "<|reserved_token_231|>",
1887
+ "lstrip": false,
1888
+ "normalized": false,
1889
+ "rstrip": false,
1890
+ "single_word": false,
1891
+ "special": true
1892
+ },
1893
+ "126316": {
1894
+ "content": "<|reserved_token_232|>",
1895
+ "lstrip": false,
1896
+ "normalized": false,
1897
+ "rstrip": false,
1898
+ "single_word": false,
1899
+ "special": true
1900
+ },
1901
+ "126317": {
1902
+ "content": "<|reserved_token_233|>",
1903
+ "lstrip": false,
1904
+ "normalized": false,
1905
+ "rstrip": false,
1906
+ "single_word": false,
1907
+ "special": true
1908
+ },
1909
+ "126318": {
1910
+ "content": "<|reserved_token_234|>",
1911
+ "lstrip": false,
1912
+ "normalized": false,
1913
+ "rstrip": false,
1914
+ "single_word": false,
1915
+ "special": true
1916
+ },
1917
+ "126319": {
1918
+ "content": "<|reserved_token_235|>",
1919
+ "lstrip": false,
1920
+ "normalized": false,
1921
+ "rstrip": false,
1922
+ "single_word": false,
1923
+ "special": true
1924
+ },
1925
+ "126320": {
1926
+ "content": "<|reserved_token_236|>",
1927
+ "lstrip": false,
1928
+ "normalized": false,
1929
+ "rstrip": false,
1930
+ "single_word": false,
1931
+ "special": true
1932
+ },
1933
+ "126321": {
1934
+ "content": "<|reserved_token_237|>",
1935
+ "lstrip": false,
1936
+ "normalized": false,
1937
+ "rstrip": false,
1938
+ "single_word": false,
1939
+ "special": true
1940
+ },
1941
+ "126322": {
1942
+ "content": "<|reserved_token_238|>",
1943
+ "lstrip": false,
1944
+ "normalized": false,
1945
+ "rstrip": false,
1946
+ "single_word": false,
1947
+ "special": true
1948
+ },
1949
+ "126323": {
1950
+ "content": "<|reserved_token_239|>",
1951
+ "lstrip": false,
1952
+ "normalized": false,
1953
+ "rstrip": false,
1954
+ "single_word": false,
1955
+ "special": true
1956
+ },
1957
+ "126324": {
1958
+ "content": "<|reserved_token_240|>",
1959
+ "lstrip": false,
1960
+ "normalized": false,
1961
+ "rstrip": false,
1962
+ "single_word": false,
1963
+ "special": true
1964
+ },
1965
+ "126325": {
1966
+ "content": "<|reserved_token_241|>",
1967
+ "lstrip": false,
1968
+ "normalized": false,
1969
+ "rstrip": false,
1970
+ "single_word": false,
1971
+ "special": true
1972
+ },
1973
+ "126326": {
1974
+ "content": "<|reserved_token_242|>",
1975
+ "lstrip": false,
1976
+ "normalized": false,
1977
+ "rstrip": false,
1978
+ "single_word": false,
1979
+ "special": true
1980
+ },
1981
+ "126327": {
1982
+ "content": "<|reserved_token_243|>",
1983
+ "lstrip": false,
1984
+ "normalized": false,
1985
+ "rstrip": false,
1986
+ "single_word": false,
1987
+ "special": true
1988
+ },
1989
+ "126328": {
1990
+ "content": "<|reserved_token_244|>",
1991
+ "lstrip": false,
1992
+ "normalized": false,
1993
+ "rstrip": false,
1994
+ "single_word": false,
1995
+ "special": true
1996
+ },
1997
+ "126329": {
1998
+ "content": "<|reserved_token_245|>",
1999
+ "lstrip": false,
2000
+ "normalized": false,
2001
+ "rstrip": false,
2002
+ "single_word": false,
2003
+ "special": true
2004
+ },
2005
+ "126330": {
2006
+ "content": "<|reserved_token_246|>",
2007
+ "lstrip": false,
2008
+ "normalized": false,
2009
+ "rstrip": false,
2010
+ "single_word": false,
2011
+ "special": true
2012
+ },
2013
+ "126331": {
2014
+ "content": "<|reserved_token_247|>",
2015
+ "lstrip": false,
2016
+ "normalized": false,
2017
+ "rstrip": false,
2018
+ "single_word": false,
2019
+ "special": true
2020
+ },
2021
+ "126332": {
2022
+ "content": "<|reserved_token_248|>",
2023
+ "lstrip": false,
2024
+ "normalized": false,
2025
+ "rstrip": false,
2026
+ "single_word": false,
2027
+ "special": true
2028
+ },
2029
+ "126333": {
2030
+ "content": "<|reserved_token_249|>",
2031
+ "lstrip": false,
2032
+ "normalized": false,
2033
+ "rstrip": false,
2034
+ "single_word": false,
2035
+ "special": true
2036
+ },
2037
+ "126334": {
2038
+ "content": "<|reserved_token_250|>",
2039
+ "lstrip": false,
2040
+ "normalized": false,
2041
+ "rstrip": false,
2042
+ "single_word": false,
2043
+ "special": true
2044
+ },
2045
+ "126335": {
2046
+ "content": "<|reserved_token_251|>",
2047
+ "lstrip": false,
2048
+ "normalized": false,
2049
+ "rstrip": false,
2050
+ "single_word": false,
2051
+ "special": true
2052
+ },
2053
+ "126336": {
2054
+ "content": "<|mdm_mask|>",
2055
+ "lstrip": false,
2056
+ "normalized": false,
2057
+ "rstrip": false,
2058
+ "single_word": false,
2059
+ "special": true
2060
+ },
2061
+ "126337": {
2062
+ "content": "<|reserved_token_253|>",
2063
+ "lstrip": false,
2064
+ "normalized": false,
2065
+ "rstrip": false,
2066
+ "single_word": false,
2067
+ "special": true
2068
+ },
2069
+ "126338": {
2070
+ "content": "<|reserved_token_254|>",
2071
+ "lstrip": false,
2072
+ "normalized": false,
2073
+ "rstrip": false,
2074
+ "single_word": false,
2075
+ "special": true
2076
+ },
2077
+ "126339": {
2078
+ "content": "<|reserved_token_255|>",
2079
+ "lstrip": false,
2080
+ "normalized": false,
2081
+ "rstrip": false,
2082
+ "single_word": false,
2083
+ "special": true
2084
+ },
2085
+ "126340": {
2086
+ "content": "<role>",
2087
+ "lstrip": false,
2088
+ "normalized": false,
2089
+ "rstrip": false,
2090
+ "single_word": false,
2091
+ "special": true
2092
+ },
2093
+ "126341": {
2094
+ "content": "</role>",
2095
+ "lstrip": false,
2096
+ "normalized": false,
2097
+ "rstrip": false,
2098
+ "single_word": false,
2099
+ "special": true
2100
+ },
2101
+ "126342": {
2102
+ "content": "<|arithmetic_start|>",
2103
+ "lstrip": false,
2104
+ "normalized": false,
2105
+ "rstrip": false,
2106
+ "single_word": false,
2107
+ "special": true
2108
+ },
2109
+ "126343": {
2110
+ "content": "<|arithmetic_end|>",
2111
+ "lstrip": false,
2112
+ "normalized": false,
2113
+ "rstrip": false,
2114
+ "single_word": false,
2115
+ "special": true
2116
+ },
2117
+ "126344": {
2118
+ "content": "<|number_start|>",
2119
+ "lstrip": false,
2120
+ "normalized": false,
2121
+ "rstrip": false,
2122
+ "single_word": false,
2123
+ "special": true
2124
+ },
2125
+ "126345": {
2126
+ "content": "<|number_end|>",
2127
+ "lstrip": false,
2128
+ "normalized": false,
2129
+ "rstrip": false,
2130
+ "single_word": false,
2131
+ "special": true
2132
+ },
2133
+ "126346": {
2134
+ "content": "<|start_header_id|>",
2135
+ "lstrip": false,
2136
+ "normalized": false,
2137
+ "rstrip": false,
2138
+ "single_word": false,
2139
+ "special": true
2140
+ },
2141
+ "126347": {
2142
+ "content": "<|end_header_id|>",
2143
+ "lstrip": false,
2144
+ "normalized": false,
2145
+ "rstrip": false,
2146
+ "single_word": false,
2147
+ "special": true
2148
+ },
2149
+ "126348": {
2150
+ "content": "<|eot_id|>",
2151
+ "lstrip": false,
2152
+ "normalized": false,
2153
+ "rstrip": false,
2154
+ "single_word": false,
2155
+ "special": true
2156
+ }
2157
+ },
2158
+ "additional_special_tokens": [
2159
+ "<role>",
2160
+ "</role>",
2161
+ "<|arithmetic_start|>",
2162
+ "<|arithmetic_end|>",
2163
+ "<|number_start|>",
2164
+ "<|number_end|>"
2165
+ ],
2166
+ "bos_token": "<|startoftext|>",
2167
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
2168
+ "clean_up_tokenization_spaces": false,
2169
+ "cls_token": "[CLS]",
2170
+ "eos_token": "<|endoftext|>",
2171
+ "extra_special_tokens": {},
2172
+ "fast_tokenizer": true,
2173
+ "gmask_token": "[gMASK]",
2174
+ "merges_file": null,
2175
+ "model_input_names": [
2176
+ "input_ids",
2177
+ "attention_mask"
2178
+ ],
2179
+ "model_max_length": 2048,
2180
+ "pad_token": "<|endoftext|>",
2181
+ "padding_side": "right",
2182
+ "tokenizer_class": "PreTrainedTokenizer",
2183
+ "trust_remote_code": true
2184
+ }
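
For reference, a minimal sketch of how the tokenizer config above behaves once loaded. The checkpoint path `./ckpt` is a placeholder, loading assumes the repo's custom tokenizer class resolves via `trust_remote_code=True`, and the expected outputs are read off the `added_tokens_decoder` and `chat_template` entries shown in the diff; this is an illustration, not part of the upload:

```python
# Sketch: exercising the tokenizer_config.json added above.
# Assumption: "./ckpt" is a hypothetical local directory holding these files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./ckpt", trust_remote_code=True)

# The masked-diffusion token is registered at id 126336 in added_tokens_decoder.
mask_id = tokenizer.convert_tokens_to_ids("<|mdm_mask|>")
print(mask_id)  # expected: 126336, per the config above

# The chat template wraps each message in header/eot markers, prepends
# bos_token to the first message, and always appends the assistant header,
# so no generation-prompt flag is needed for this particular template.
messages = [{"role": "user", "content": "Describe this image."}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# <|startoftext|><|start_header_id|>user<|end_header_id|>
#
# Describe this image.<|eot_id|><|start_header_id|>assistant<|end_header_id|>
```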
trainer_state.json ADDED
@@ -0,0 +1,3781 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 2.9873772791023843,
6
+ "eval_steps": 500,
7
+ "global_step": 534,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.005610098176718092,
14
+ "grad_norm": 14.482704425563409,
15
+ "learning_rate": 1.1764705882352942e-06,
16
+ "loss": 2.4829,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.011220196353436185,
21
+ "grad_norm": 14.997611711402312,
22
+ "learning_rate": 2.3529411764705885e-06,
23
+ "loss": 2.4709,
24
+ "step": 2
25
+ },
26
+ {
27
+ "epoch": 0.016830294530154277,
28
+ "grad_norm": 22.92634314434291,
29
+ "learning_rate": 3.529411764705883e-06,
30
+ "loss": 2.5972,
31
+ "step": 3
32
+ },
33
+ {
34
+ "epoch": 0.02244039270687237,
35
+ "grad_norm": 26.08298552783473,
36
+ "learning_rate": 4.705882352941177e-06,
37
+ "loss": 2.9709,
38
+ "step": 4
39
+ },
40
+ {
41
+ "epoch": 0.028050490883590462,
42
+ "grad_norm": 10.439925915517989,
43
+ "learning_rate": 5.882352941176471e-06,
44
+ "loss": 1.9377,
45
+ "step": 5
46
+ },
47
+ {
48
+ "epoch": 0.033660589060308554,
49
+ "grad_norm": 10.400122544958307,
50
+ "learning_rate": 7.058823529411766e-06,
51
+ "loss": 2.022,
52
+ "step": 6
53
+ },
+ {
+ "epoch": 0.03927068723702665,
+ "grad_norm": 14.00552108501915,
+ "learning_rate": 8.23529411764706e-06,
+ "loss": 2.2036,
+ "step": 7
+ },
+ {
+ "epoch": 0.04488078541374474,
+ "grad_norm": 7.983448917228911,
+ "learning_rate": 9.411764705882354e-06,
+ "loss": 1.7996,
+ "step": 8
+ },
+ {
+ "epoch": 0.05049088359046283,
+ "grad_norm": 14.607698611118,
+ "learning_rate": 1.0588235294117648e-05,
+ "loss": 1.7783,
+ "step": 9
+ },
+ {
+ "epoch": 0.056100981767180924,
+ "grad_norm": 18.30713741135737,
+ "learning_rate": 1.1764705882352942e-05,
+ "loss": 2.3484,
+ "step": 10
+ },
+ {
+ "epoch": 0.061711079943899017,
+ "grad_norm": 13.136001683680378,
+ "learning_rate": 1.2941176470588238e-05,
+ "loss": 2.1121,
+ "step": 11
+ },
+ {
+ "epoch": 0.06732117812061711,
+ "grad_norm": 10.26556944471482,
+ "learning_rate": 1.4117647058823532e-05,
+ "loss": 1.6633,
+ "step": 12
+ },
+ {
+ "epoch": 0.0729312762973352,
+ "grad_norm": 20.81334203768653,
+ "learning_rate": 1.5294117647058822e-05,
+ "loss": 1.8455,
+ "step": 13
+ },
+ {
+ "epoch": 0.0785413744740533,
+ "grad_norm": 25.844425401837857,
+ "learning_rate": 1.647058823529412e-05,
+ "loss": 1.9926,
+ "step": 14
+ },
+ {
+ "epoch": 0.08415147265077139,
+ "grad_norm": 11.674830089702823,
+ "learning_rate": 1.7647058823529414e-05,
+ "loss": 2.0823,
+ "step": 15
+ },
+ {
+ "epoch": 0.08976157082748948,
+ "grad_norm": 10.106380196868269,
+ "learning_rate": 1.8823529411764708e-05,
+ "loss": 1.6956,
+ "step": 16
+ },
+ {
+ "epoch": 0.09537166900420757,
+ "grad_norm": 13.239082572950917,
+ "learning_rate": 2e-05,
+ "loss": 1.6792,
+ "step": 17
+ },
+ {
+ "epoch": 0.10098176718092566,
+ "grad_norm": 14.095678950101247,
+ "learning_rate": 1.9999833838717115e-05,
+ "loss": 1.762,
+ "step": 18
+ },
+ {
+ "epoch": 0.10659186535764376,
+ "grad_norm": 10.75361523047076,
+ "learning_rate": 1.9999335361003903e-05,
+ "loss": 1.7206,
+ "step": 19
+ },
+ {
+ "epoch": 0.11220196353436185,
+ "grad_norm": 20.72078305937572,
+ "learning_rate": 1.9998504585266525e-05,
+ "loss": 1.7202,
+ "step": 20
+ },
+ {
+ "epoch": 0.11781206171107994,
+ "grad_norm": 8.174316973115733,
+ "learning_rate": 1.9997341542181152e-05,
+ "loss": 1.5443,
+ "step": 21
+ },
+ {
+ "epoch": 0.12342215988779803,
+ "grad_norm": 11.02373772633163,
+ "learning_rate": 1.9995846274692837e-05,
+ "loss": 1.7834,
+ "step": 22
+ },
+ {
+ "epoch": 0.12903225806451613,
+ "grad_norm": 12.907132265179525,
+ "learning_rate": 1.999401883801392e-05,
+ "loss": 1.6106,
+ "step": 23
+ },
+ {
+ "epoch": 0.13464235624123422,
+ "grad_norm": 11.666249990206644,
+ "learning_rate": 1.9991859299622017e-05,
+ "loss": 1.8916,
+ "step": 24
+ },
+ {
+ "epoch": 0.1402524544179523,
+ "grad_norm": 11.755169146289523,
+ "learning_rate": 1.9989367739257487e-05,
+ "loss": 1.4958,
+ "step": 25
+ },
+ {
+ "epoch": 0.1458625525946704,
+ "grad_norm": 14.21252914739186,
+ "learning_rate": 1.9986544248920533e-05,
+ "loss": 1.5,
+ "step": 26
+ },
+ {
+ "epoch": 0.1514726507713885,
+ "grad_norm": 9.52592172144483,
+ "learning_rate": 1.9983388932867766e-05,
+ "loss": 1.5516,
+ "step": 27
+ },
+ {
+ "epoch": 0.1570827489481066,
+ "grad_norm": 9.2197257965334,
+ "learning_rate": 1.997990190760838e-05,
+ "loss": 1.671,
+ "step": 28
+ },
+ {
+ "epoch": 0.16269284712482468,
+ "grad_norm": 9.623981436749904,
+ "learning_rate": 1.997608330189984e-05,
+ "loss": 1.7939,
+ "step": 29
+ },
+ {
+ "epoch": 0.16830294530154277,
+ "grad_norm": 9.961693725270193,
+ "learning_rate": 1.9971933256743125e-05,
+ "loss": 1.6443,
+ "step": 30
+ },
+ {
+ "epoch": 0.17391304347826086,
+ "grad_norm": 10.206809416567838,
+ "learning_rate": 1.9967451925377536e-05,
+ "loss": 1.6938,
+ "step": 31
+ },
+ {
+ "epoch": 0.17952314165497896,
+ "grad_norm": 17.28004161872836,
+ "learning_rate": 1.9962639473275015e-05,
+ "loss": 1.3949,
+ "step": 32
+ },
+ {
+ "epoch": 0.18513323983169705,
+ "grad_norm": 10.213936219479217,
+ "learning_rate": 1.995749607813406e-05,
+ "loss": 1.651,
+ "step": 33
+ },
+ {
+ "epoch": 0.19074333800841514,
+ "grad_norm": 10.931208369493481,
+ "learning_rate": 1.9952021929873144e-05,
+ "loss": 1.7842,
+ "step": 34
+ },
+ {
+ "epoch": 0.19635343618513323,
+ "grad_norm": 14.257442478586555,
+ "learning_rate": 1.994621723062371e-05,
+ "loss": 1.874,
+ "step": 35
+ },
+ {
+ "epoch": 0.20196353436185133,
+ "grad_norm": 11.657566039275686,
+ "learning_rate": 1.9940082194722707e-05,
+ "loss": 1.7837,
+ "step": 36
+ },
+ {
+ "epoch": 0.20757363253856942,
+ "grad_norm": 14.960516245890997,
+ "learning_rate": 1.9933617048704677e-05,
+ "loss": 1.8665,
+ "step": 37
+ },
+ {
+ "epoch": 0.2131837307152875,
+ "grad_norm": 10.898489261648411,
+ "learning_rate": 1.992682203129339e-05,
+ "loss": 1.6721,
+ "step": 38
+ },
+ {
+ "epoch": 0.2187938288920056,
+ "grad_norm": 11.367240208375325,
+ "learning_rate": 1.991969739339302e-05,
+ "loss": 1.5697,
+ "step": 39
+ },
+ {
+ "epoch": 0.2244039270687237,
+ "grad_norm": 33.69171915018733,
+ "learning_rate": 1.9912243398078905e-05,
+ "loss": 1.7683,
+ "step": 40
+ },
+ {
+ "epoch": 0.2300140252454418,
+ "grad_norm": 16.319511202580383,
+ "learning_rate": 1.9904460320587797e-05,
+ "loss": 1.6084,
+ "step": 41
+ },
+ {
+ "epoch": 0.23562412342215988,
+ "grad_norm": 169.4523919717209,
+ "learning_rate": 1.989634844830773e-05,
+ "loss": 1.3691,
+ "step": 42
+ },
+ {
+ "epoch": 0.24123422159887797,
+ "grad_norm": 10.82766829997138,
+ "learning_rate": 1.9887908080767395e-05,
+ "loss": 1.4698,
+ "step": 43
+ },
+ {
+ "epoch": 0.24684431977559607,
+ "grad_norm": 13.183269542497765,
+ "learning_rate": 1.9879139529625078e-05,
+ "loss": 1.6484,
+ "step": 44
+ },
+ {
+ "epoch": 0.25245441795231416,
+ "grad_norm": 13.74453433039017,
+ "learning_rate": 1.987004311865716e-05,
+ "loss": 1.682,
+ "step": 45
+ },
+ {
+ "epoch": 0.25806451612903225,
+ "grad_norm": 25.284432212762642,
+ "learning_rate": 1.986061918374616e-05,
+ "loss": 1.7417,
+ "step": 46
+ },
+ {
+ "epoch": 0.26367461430575034,
+ "grad_norm": 10.320026390736263,
+ "learning_rate": 1.9850868072868316e-05,
+ "loss": 1.6057,
+ "step": 47
+ },
+ {
+ "epoch": 0.26928471248246844,
+ "grad_norm": 11.710218180919735,
+ "learning_rate": 1.984079014608077e-05,
+ "loss": 1.6101,
+ "step": 48
+ },
+ {
+ "epoch": 0.27489481065918653,
+ "grad_norm": 19.568770924414054,
+ "learning_rate": 1.9830385775508233e-05,
+ "loss": 1.6445,
+ "step": 49
+ },
+ {
+ "epoch": 0.2805049088359046,
+ "grad_norm": 22.598203806640846,
+ "learning_rate": 1.9819655345329284e-05,
+ "loss": 1.4283,
+ "step": 50
+ },
+ {
+ "epoch": 0.2861150070126227,
+ "grad_norm": 15.19582950857613,
+ "learning_rate": 1.9808599251762145e-05,
+ "loss": 1.3715,
+ "step": 51
+ },
+ {
+ "epoch": 0.2917251051893408,
+ "grad_norm": 12.621837235302998,
+ "learning_rate": 1.979721790305009e-05,
+ "loss": 1.5691,
+ "step": 52
+ },
+ {
+ "epoch": 0.2973352033660589,
+ "grad_norm": 9.153518106932475,
+ "learning_rate": 1.978551171944634e-05,
+ "loss": 1.4436,
+ "step": 53
+ },
+ {
+ "epoch": 0.302945301542777,
+ "grad_norm": 9.895264955740506,
+ "learning_rate": 1.977348113319855e-05,
+ "loss": 1.6643,
+ "step": 54
+ },
+ {
+ "epoch": 0.3085553997194951,
+ "grad_norm": 23.27095547139845,
+ "learning_rate": 1.9761126588532875e-05,
+ "loss": 1.7594,
+ "step": 55
+ },
+ {
+ "epoch": 0.3141654978962132,
+ "grad_norm": 16.900190113880402,
+ "learning_rate": 1.974844854163753e-05,
+ "loss": 1.4728,
+ "step": 56
+ },
+ {
+ "epoch": 0.31977559607293127,
+ "grad_norm": 12.905411605150357,
+ "learning_rate": 1.9735447460645966e-05,
+ "loss": 1.4458,
+ "step": 57
+ },
+ {
+ "epoch": 0.32538569424964936,
+ "grad_norm": 31.337793336880274,
+ "learning_rate": 1.972212382561958e-05,
+ "loss": 1.5244,
+ "step": 58
+ },
+ {
+ "epoch": 0.33099579242636745,
+ "grad_norm": 16.41143055596397,
+ "learning_rate": 1.9708478128530002e-05,
+ "loss": 1.5635,
+ "step": 59
+ },
+ {
+ "epoch": 0.33660589060308554,
+ "grad_norm": 13.430850712385256,
+ "learning_rate": 1.9694510873240895e-05,
+ "loss": 1.575,
+ "step": 60
+ },
+ {
+ "epoch": 0.34221598877980364,
+ "grad_norm": 12.426823261781168,
+ "learning_rate": 1.9680222575489392e-05,
+ "loss": 1.5527,
+ "step": 61
+ },
+ {
+ "epoch": 0.34782608695652173,
+ "grad_norm": 11.631979677867267,
+ "learning_rate": 1.9665613762867016e-05,
+ "loss": 1.6676,
+ "step": 62
+ },
+ {
+ "epoch": 0.3534361851332398,
+ "grad_norm": 12.593635334214698,
+ "learning_rate": 1.965068497480022e-05,
+ "loss": 1.521,
+ "step": 63
+ },
+ {
+ "epoch": 0.3590462833099579,
+ "grad_norm": 29.46666265441875,
+ "learning_rate": 1.963543676253048e-05,
+ "loss": 1.1823,
+ "step": 64
+ },
+ {
+ "epoch": 0.364656381486676,
+ "grad_norm": 17.08155790602239,
+ "learning_rate": 1.9619869689093893e-05,
+ "loss": 1.4791,
+ "step": 65
+ },
+ {
+ "epoch": 0.3702664796633941,
+ "grad_norm": 17.295026461260033,
+ "learning_rate": 1.9603984329300443e-05,
+ "loss": 1.835,
+ "step": 66
+ },
+ {
+ "epoch": 0.3758765778401122,
+ "grad_norm": 28.11302094956105,
+ "learning_rate": 1.9587781269712743e-05,
+ "loss": 1.7666,
+ "step": 67
+ },
+ {
+ "epoch": 0.3814866760168303,
+ "grad_norm": 15.648509062666934,
+ "learning_rate": 1.9571261108624386e-05,
+ "loss": 1.5831,
+ "step": 68
+ },
+ {
+ "epoch": 0.3870967741935484,
+ "grad_norm": 11.295454125938116,
+ "learning_rate": 1.9554424456037856e-05,
+ "loss": 1.5546,
+ "step": 69
+ },
+ {
+ "epoch": 0.39270687237026647,
+ "grad_norm": 20.86600149222171,
+ "learning_rate": 1.9537271933641986e-05,
+ "loss": 1.5649,
+ "step": 70
+ },
+ {
+ "epoch": 0.39831697054698456,
+ "grad_norm": 12.413407895378603,
+ "learning_rate": 1.9519804174789037e-05,
+ "loss": 1.4971,
+ "step": 71
+ },
+ {
+ "epoch": 0.40392706872370265,
+ "grad_norm": 39.57009669926718,
+ "learning_rate": 1.950202182447127e-05,
+ "loss": 1.6082,
+ "step": 72
+ },
+ {
+ "epoch": 0.40953716690042075,
+ "grad_norm": 31.34749362616257,
+ "learning_rate": 1.948392553929717e-05,
+ "loss": 1.5098,
+ "step": 73
+ },
+ {
+ "epoch": 0.41514726507713884,
+ "grad_norm": 14.945037268714179,
+ "learning_rate": 1.9465515987467167e-05,
+ "loss": 1.7393,
+ "step": 74
+ },
+ {
+ "epoch": 0.42075736325385693,
+ "grad_norm": 47.715394673395544,
+ "learning_rate": 1.9446793848748988e-05,
+ "loss": 1.5907,
+ "step": 75
+ },
+ {
+ "epoch": 0.426367461430575,
+ "grad_norm": 22.307210800183924,
+ "learning_rate": 1.9427759814452535e-05,
+ "loss": 1.8286,
+ "step": 76
+ },
+ {
+ "epoch": 0.4319775596072931,
+ "grad_norm": 10.619179031781488,
+ "learning_rate": 1.940841458740438e-05,
+ "loss": 1.5184,
+ "step": 77
+ },
+ {
+ "epoch": 0.4375876577840112,
+ "grad_norm": 14.094325579812917,
+ "learning_rate": 1.9388758881921802e-05,
+ "loss": 1.4232,
+ "step": 78
+ },
+ {
+ "epoch": 0.4431977559607293,
+ "grad_norm": 19.72560091354988,
+ "learning_rate": 1.93687934237864e-05,
+ "loss": 1.6243,
+ "step": 79
+ },
+ {
+ "epoch": 0.4488078541374474,
+ "grad_norm": 19.753840295719044,
+ "learning_rate": 1.9348518950217327e-05,
+ "loss": 1.5801,
+ "step": 80
+ },
+ {
+ "epoch": 0.4544179523141655,
+ "grad_norm": 20.345860072141033,
+ "learning_rate": 1.932793620984403e-05,
+ "loss": 1.604,
+ "step": 81
+ },
+ {
+ "epoch": 0.4600280504908836,
+ "grad_norm": 16.101034336903197,
+ "learning_rate": 1.9307045962678633e-05,
+ "loss": 1.8381,
+ "step": 82
+ },
+ {
+ "epoch": 0.46563814866760167,
+ "grad_norm": 23.043596084469865,
+ "learning_rate": 1.928584898008786e-05,
+ "loss": 1.3656,
+ "step": 83
+ },
+ {
+ "epoch": 0.47124824684431976,
+ "grad_norm": 14.0013318273939,
+ "learning_rate": 1.9264346044764563e-05,
+ "loss": 1.6921,
+ "step": 84
+ },
+ {
+ "epoch": 0.47685834502103785,
+ "grad_norm": 10.088671867334291,
+ "learning_rate": 1.9242537950698808e-05,
+ "loss": 1.7664,
+ "step": 85
+ },
+ {
+ "epoch": 0.48246844319775595,
+ "grad_norm": 13.096601724817155,
+ "learning_rate": 1.9220425503148573e-05,
+ "loss": 1.6365,
+ "step": 86
+ },
+ {
+ "epoch": 0.48807854137447404,
+ "grad_norm": 10.293199079674654,
+ "learning_rate": 1.9198009518610006e-05,
+ "loss": 1.6372,
+ "step": 87
+ },
+ {
+ "epoch": 0.49368863955119213,
+ "grad_norm": 17.45696409243125,
+ "learning_rate": 1.917529082478727e-05,
+ "loss": 1.9688,
+ "step": 88
+ },
+ {
+ "epoch": 0.4992987377279102,
+ "grad_norm": 12.501332529093151,
+ "learning_rate": 1.9152270260561986e-05,
+ "loss": 1.7185,
+ "step": 89
+ },
+ {
+ "epoch": 0.5049088359046283,
+ "grad_norm": 9.567511469828927,
+ "learning_rate": 1.912894867596227e-05,
+ "loss": 1.5637,
+ "step": 90
+ },
+ {
+ "epoch": 0.5105189340813464,
+ "grad_norm": 10.588869643202907,
+ "learning_rate": 1.9105326932131318e-05,
+ "loss": 1.4875,
+ "step": 91
+ },
+ {
+ "epoch": 0.5161290322580645,
+ "grad_norm": 25.375515168691575,
+ "learning_rate": 1.9081405901295637e-05,
+ "loss": 1.5908,
+ "step": 92
+ },
+ {
+ "epoch": 0.5217391304347826,
+ "grad_norm": 34.618538302204044,
+ "learning_rate": 1.905718646673282e-05,
+ "loss": 1.73,
+ "step": 93
+ },
+ {
+ "epoch": 0.5273492286115007,
+ "grad_norm": 9.481578038519926,
+ "learning_rate": 1.9032669522738933e-05,
+ "loss": 1.4905,
+ "step": 94
+ },
+ {
+ "epoch": 0.5329593267882188,
+ "grad_norm": 15.199014159234617,
+ "learning_rate": 1.9007855974595508e-05,
+ "loss": 1.6711,
+ "step": 95
+ },
+ {
+ "epoch": 0.5385694249649369,
+ "grad_norm": 12.401899473450863,
+ "learning_rate": 1.8982746738536097e-05,
+ "loss": 1.4282,
+ "step": 96
+ },
+ {
+ "epoch": 0.544179523141655,
+ "grad_norm": 15.694607543801956,
+ "learning_rate": 1.8957342741712444e-05,
+ "loss": 1.6658,
+ "step": 97
+ },
+ {
+ "epoch": 0.5497896213183731,
+ "grad_norm": 10.78564261946902,
+ "learning_rate": 1.8931644922160274e-05,
+ "loss": 1.5895,
+ "step": 98
+ },
+ {
+ "epoch": 0.5553997194950911,
+ "grad_norm": 17.60784194989688,
+ "learning_rate": 1.8905654228764614e-05,
+ "loss": 1.4861,
+ "step": 99
+ },
+ {
+ "epoch": 0.5610098176718092,
+ "grad_norm": 14.053092600296077,
+ "learning_rate": 1.8879371621224787e-05,
+ "loss": 1.3578,
+ "step": 100
+ },
+ {
+ "epoch": 0.5666199158485273,
+ "grad_norm": 12.259536566697912,
+ "learning_rate": 1.8852798070018974e-05,
+ "loss": 1.4281,
+ "step": 101
+ },
+ {
+ "epoch": 0.5722300140252454,
+ "grad_norm": 13.137644468308233,
+ "learning_rate": 1.882593455636836e-05,
+ "loss": 1.3722,
+ "step": 102
+ },
+ {
+ "epoch": 0.5778401122019635,
+ "grad_norm": 9.494614193722693,
+ "learning_rate": 1.8798782072200924e-05,
+ "loss": 1.4653,
+ "step": 103
+ },
+ {
+ "epoch": 0.5834502103786816,
+ "grad_norm": 16.144017929658787,
+ "learning_rate": 1.8771341620114796e-05,
+ "loss": 1.5071,
+ "step": 104
+ },
+ {
+ "epoch": 0.5890603085553997,
+ "grad_norm": 14.436242746383904,
+ "learning_rate": 1.8743614213341256e-05,
+ "loss": 1.6482,
+ "step": 105
+ },
+ {
+ "epoch": 0.5946704067321178,
+ "grad_norm": 10.994748673603036,
+ "learning_rate": 1.8715600875707294e-05,
+ "loss": 1.4865,
+ "step": 106
+ },
+ {
+ "epoch": 0.6002805049088359,
+ "grad_norm": 12.3974862231508,
+ "learning_rate": 1.8687302641597824e-05,
+ "loss": 1.4387,
+ "step": 107
+ },
+ {
+ "epoch": 0.605890603085554,
+ "grad_norm": 15.733090579327103,
+ "learning_rate": 1.8658720555917487e-05,
+ "loss": 1.6455,
+ "step": 108
+ },
+ {
+ "epoch": 0.6115007012622721,
+ "grad_norm": 8.115989009375005,
+ "learning_rate": 1.862985567405207e-05,
+ "loss": 1.3164,
+ "step": 109
+ },
+ {
+ "epoch": 0.6171107994389902,
+ "grad_norm": 9.584544195435662,
+ "learning_rate": 1.860070906182953e-05,
+ "loss": 1.4764,
+ "step": 110
+ },
+ {
+ "epoch": 0.6227208976157083,
+ "grad_norm": 8.898340243711843,
+ "learning_rate": 1.8571281795480632e-05,
+ "loss": 1.5719,
+ "step": 111
+ },
+ {
+ "epoch": 0.6283309957924264,
+ "grad_norm": 22.902730670382045,
+ "learning_rate": 1.8541574961599233e-05,
+ "loss": 1.51,
+ "step": 112
+ },
+ {
+ "epoch": 0.6339410939691444,
+ "grad_norm": 12.53384464527916,
+ "learning_rate": 1.8511589657102137e-05,
+ "loss": 1.4294,
+ "step": 113
+ },
+ {
+ "epoch": 0.6395511921458625,
+ "grad_norm": 41.90777459667712,
+ "learning_rate": 1.8481326989188603e-05,
+ "loss": 1.4984,
+ "step": 114
+ },
+ {
+ "epoch": 0.6451612903225806,
+ "grad_norm": 12.684090261010915,
+ "learning_rate": 1.845078807529946e-05,
+ "loss": 1.3514,
+ "step": 115
+ },
+ {
+ "epoch": 0.6507713884992987,
+ "grad_norm": 9.037066734768192,
+ "learning_rate": 1.8419974043075842e-05,
+ "loss": 1.6403,
+ "step": 116
+ },
+ {
+ "epoch": 0.6563814866760168,
+ "grad_norm": 12.27393138414365,
+ "learning_rate": 1.838888603031756e-05,
+ "loss": 1.5217,
+ "step": 117
+ },
+ {
+ "epoch": 0.6619915848527349,
+ "grad_norm": 13.591447680784224,
+ "learning_rate": 1.8357525184941065e-05,
+ "loss": 1.6687,
+ "step": 118
+ },
+ {
+ "epoch": 0.667601683029453,
+ "grad_norm": 24.839843874937873,
+ "learning_rate": 1.8325892664937098e-05,
+ "loss": 1.6273,
+ "step": 119
+ },
+ {
+ "epoch": 0.6732117812061711,
+ "grad_norm": 40.24703694848467,
+ "learning_rate": 1.8293989638327906e-05,
+ "loss": 1.5715,
+ "step": 120
+ },
+ {
+ "epoch": 0.6788218793828892,
+ "grad_norm": 13.660745551221684,
+ "learning_rate": 1.8261817283124115e-05,
+ "loss": 1.4114,
+ "step": 121
+ },
+ {
+ "epoch": 0.6844319775596073,
+ "grad_norm": 35.89402465913394,
+ "learning_rate": 1.822937678728124e-05,
+ "loss": 1.3927,
+ "step": 122
+ },
+ {
+ "epoch": 0.6900420757363254,
+ "grad_norm": 11.308872740776515,
+ "learning_rate": 1.8196669348655817e-05,
+ "loss": 1.3174,
+ "step": 123
+ },
+ {
+ "epoch": 0.6956521739130435,
+ "grad_norm": 10.263689306849406,
+ "learning_rate": 1.8163696174961167e-05,
+ "loss": 1.5547,
+ "step": 124
+ },
+ {
+ "epoch": 0.7012622720897616,
+ "grad_norm": 13.801928996749705,
+ "learning_rate": 1.8130458483722814e-05,
+ "loss": 1.5886,
+ "step": 125
+ },
+ {
+ "epoch": 0.7068723702664796,
+ "grad_norm": 19.35669184267192,
+ "learning_rate": 1.809695750223351e-05,
+ "loss": 1.5143,
+ "step": 126
+ },
+ {
+ "epoch": 0.7124824684431977,
+ "grad_norm": 10.891218633996896,
+ "learning_rate": 1.806319446750795e-05,
+ "loss": 1.4685,
+ "step": 127
+ },
+ {
+ "epoch": 0.7180925666199158,
+ "grad_norm": 28.696150917144198,
+ "learning_rate": 1.802917062623705e-05,
+ "loss": 2.0259,
+ "step": 128
+ },
+ {
+ "epoch": 0.7237026647966339,
+ "grad_norm": 15.890258346122694,
+ "learning_rate": 1.7994887234741944e-05,
+ "loss": 1.4751,
+ "step": 129
+ },
+ {
+ "epoch": 0.729312762973352,
+ "grad_norm": 8.63985122916741,
+ "learning_rate": 1.7960345558927597e-05,
+ "loss": 1.2119,
+ "step": 130
+ },
+ {
+ "epoch": 0.7349228611500701,
+ "grad_norm": 18.584698267002675,
+ "learning_rate": 1.7925546874236043e-05,
+ "loss": 1.4341,
+ "step": 131
+ },
+ {
+ "epoch": 0.7405329593267882,
+ "grad_norm": 14.450827757949208,
+ "learning_rate": 1.7890492465599302e-05,
+ "loss": 1.608,
+ "step": 132
+ },
+ {
+ "epoch": 0.7461430575035063,
+ "grad_norm": 15.571705745790306,
+ "learning_rate": 1.785518362739193e-05,
+ "loss": 1.5671,
+ "step": 133
+ },
+ {
+ "epoch": 0.7517531556802244,
+ "grad_norm": 12.605049972249128,
+ "learning_rate": 1.7819621663383233e-05,
+ "loss": 1.4163,
+ "step": 134
+ },
+ {
+ "epoch": 0.7573632538569425,
+ "grad_norm": 11.318493509461359,
+ "learning_rate": 1.778380788668911e-05,
+ "loss": 1.5184,
+ "step": 135
+ },
+ {
+ "epoch": 0.7629733520336606,
+ "grad_norm": 10.337554569071575,
+ "learning_rate": 1.7747743619723576e-05,
+ "loss": 1.4598,
+ "step": 136
+ },
+ {
+ "epoch": 0.7685834502103787,
+ "grad_norm": 18.136003012039577,
+ "learning_rate": 1.771143019414994e-05,
+ "loss": 1.3933,
+ "step": 137
+ },
+ {
+ "epoch": 0.7741935483870968,
+ "grad_norm": 23.63615446575357,
+ "learning_rate": 1.7674868950831622e-05,
+ "loss": 1.476,
+ "step": 138
+ },
+ {
+ "epoch": 0.7798036465638148,
+ "grad_norm": 12.27648365093833,
+ "learning_rate": 1.763806123978263e-05,
+ "loss": 1.4272,
+ "step": 139
+ },
+ {
+ "epoch": 0.7854137447405329,
+ "grad_norm": 18.76442955908805,
+ "learning_rate": 1.7601008420117752e-05,
+ "loss": 1.5662,
+ "step": 140
+ },
+ {
+ "epoch": 0.791023842917251,
+ "grad_norm": 17.2691920820149,
+ "learning_rate": 1.756371186000233e-05,
+ "loss": 1.6274,
+ "step": 141
+ },
+ {
+ "epoch": 0.7966339410939691,
+ "grad_norm": 34.883527695882705,
+ "learning_rate": 1.7526172936601757e-05,
+ "loss": 1.4771,
+ "step": 142
+ },
+ {
+ "epoch": 0.8022440392706872,
+ "grad_norm": 23.56533760346909,
+ "learning_rate": 1.748839303603062e-05,
+ "loss": 1.6044,
+ "step": 143
+ },
+ {
+ "epoch": 0.8078541374474053,
+ "grad_norm": 57.11894729578249,
+ "learning_rate": 1.7450373553301543e-05,
+ "loss": 1.3727,
+ "step": 144
+ },
+ {
+ "epoch": 0.8134642356241234,
+ "grad_norm": 8.265029503541601,
+ "learning_rate": 1.741211589227363e-05,
+ "loss": 1.3591,
+ "step": 145
+ },
+ {
+ "epoch": 0.8190743338008415,
+ "grad_norm": 138.3935650140209,
+ "learning_rate": 1.7373621465600675e-05,
+ "loss": 1.425,
+ "step": 146
+ },
+ {
+ "epoch": 0.8246844319775596,
+ "grad_norm": 24.359106304785023,
+ "learning_rate": 1.733489169467897e-05,
+ "loss": 1.5576,
+ "step": 147
+ },
+ {
+ "epoch": 0.8302945301542777,
+ "grad_norm": 9.862752909579248,
+ "learning_rate": 1.7295928009594828e-05,
+ "loss": 1.4147,
+ "step": 148
+ },
+ {
+ "epoch": 0.8359046283309958,
+ "grad_norm": 23.537743248039938,
+ "learning_rate": 1.7256731849071784e-05,
+ "loss": 1.6989,
+ "step": 149
+ },
+ {
+ "epoch": 0.8415147265077139,
+ "grad_norm": 14.04775976293614,
+ "learning_rate": 1.721730466041746e-05,
+ "loss": 1.4941,
+ "step": 150
+ },
+ {
+ "epoch": 0.847124824684432,
+ "grad_norm": 12.168431113203132,
+ "learning_rate": 1.717764789947014e-05,
+ "loss": 1.3478,
+ "step": 151
+ },
+ {
+ "epoch": 0.85273492286115,
+ "grad_norm": 14.932661016457688,
+ "learning_rate": 1.7137763030544993e-05,
+ "loss": 1.468,
+ "step": 152
+ },
+ {
+ "epoch": 0.8583450210378681,
+ "grad_norm": 16.571205525117712,
+ "learning_rate": 1.709765152638002e-05,
+ "loss": 1.7317,
+ "step": 153
+ },
+ {
+ "epoch": 0.8639551192145862,
+ "grad_norm": 14.117927089933083,
+ "learning_rate": 1.7057314868081657e-05,
+ "loss": 1.4775,
+ "step": 154
+ },
+ {
+ "epoch": 0.8695652173913043,
+ "grad_norm": 13.662454658161607,
+ "learning_rate": 1.7016754545070106e-05,
+ "loss": 1.7002,
+ "step": 155
+ },
+ {
+ "epoch": 0.8751753155680224,
+ "grad_norm": 16.492645273733498,
+ "learning_rate": 1.6975972055024322e-05,
+ "loss": 1.5245,
+ "step": 156
+ },
+ {
+ "epoch": 0.8807854137447405,
+ "grad_norm": 16.82235019269752,
+ "learning_rate": 1.693496890382672e-05,
+ "loss": 1.7002,
+ "step": 157
+ },
+ {
+ "epoch": 0.8863955119214586,
+ "grad_norm": 9.585860481675596,
+ "learning_rate": 1.6893746605507567e-05,
+ "loss": 1.4924,
+ "step": 158
+ },
+ {
+ "epoch": 0.8920056100981767,
+ "grad_norm": 14.39753403643121,
+ "learning_rate": 1.685230668218908e-05,
+ "loss": 1.2832,
+ "step": 159
+ },
+ {
+ "epoch": 0.8976157082748948,
+ "grad_norm": 17.068205819906623,
+ "learning_rate": 1.681065066402922e-05,
+ "loss": 1.3861,
+ "step": 160
+ },
+ {
+ "epoch": 0.9032258064516129,
+ "grad_norm": 15.23446199758208,
+ "learning_rate": 1.6768780089165196e-05,
+ "loss": 1.3945,
+ "step": 161
+ },
+ {
+ "epoch": 0.908835904628331,
+ "grad_norm": 13.204881161532553,
+ "learning_rate": 1.672669650365665e-05,
+ "loss": 1.5229,
+ "step": 162
+ },
+ {
+ "epoch": 0.9144460028050491,
+ "grad_norm": 10.822302558242614,
+ "learning_rate": 1.66844014614286e-05,
+ "loss": 1.4491,
+ "step": 163
+ },
+ {
+ "epoch": 0.9200561009817672,
+ "grad_norm": 13.40338254856901,
+ "learning_rate": 1.6641896524214037e-05,
+ "loss": 1.5619,
+ "step": 164
+ },
+ {
+ "epoch": 0.9256661991584852,
+ "grad_norm": 17.69450894792508,
+ "learning_rate": 1.6599183261496278e-05,
+ "loss": 1.4905,
+ "step": 165
+ },
+ {
+ "epoch": 0.9312762973352033,
+ "grad_norm": 13.944664173351105,
+ "learning_rate": 1.6556263250450995e-05,
+ "loss": 1.3102,
+ "step": 166
+ },
+ {
+ "epoch": 0.9368863955119214,
+ "grad_norm": 22.354089172464942,
+ "learning_rate": 1.6513138075887982e-05,
+ "loss": 1.4281,
+ "step": 167
+ },
+ {
+ "epoch": 0.9424964936886395,
+ "grad_norm": 12.888048658450844,
+ "learning_rate": 1.6469809330192644e-05,
+ "loss": 1.2822,
+ "step": 168
+ },
+ {
+ "epoch": 0.9481065918653576,
+ "grad_norm": 14.132338918324168,
+ "learning_rate": 1.642627861326721e-05,
+ "loss": 1.6548,
+ "step": 169
+ },
+ {
+ "epoch": 0.9537166900420757,
+ "grad_norm": 17.359600887821536,
+ "learning_rate": 1.638254753247162e-05,
+ "loss": 1.5647,
+ "step": 170
+ },
+ {
+ "epoch": 0.9593267882187938,
+ "grad_norm": 13.222024309183224,
+ "learning_rate": 1.63386177025642e-05,
+ "loss": 1.634,
+ "step": 171
+ },
+ {
+ "epoch": 0.9649368863955119,
+ "grad_norm": 17.14309674478443,
+ "learning_rate": 1.6294490745642044e-05,
+ "loss": 1.4728,
+ "step": 172
+ },
+ {
+ "epoch": 0.97054698457223,
+ "grad_norm": 15.152308719340205,
+ "learning_rate": 1.6250168291081095e-05,
+ "loss": 1.1627,
+ "step": 173
+ },
+ {
+ "epoch": 0.9761570827489481,
+ "grad_norm": 15.348018396503871,
+ "learning_rate": 1.6205651975476013e-05,
+ "loss": 1.3184,
+ "step": 174
+ },
+ {
+ "epoch": 0.9817671809256662,
+ "grad_norm": 14.030740196357252,
+ "learning_rate": 1.61609434425797e-05,
+ "loss": 1.5187,
+ "step": 175
+ },
+ {
+ "epoch": 0.9873772791023843,
+ "grad_norm": 12.340831899480172,
+ "learning_rate": 1.6116044343242643e-05,
+ "loss": 1.4763,
+ "step": 176
+ },
+ {
+ "epoch": 0.9929873772791024,
+ "grad_norm": 12.874192526939726,
+ "learning_rate": 1.607095633535194e-05,
+ "loss": 1.2938,
+ "step": 177
+ },
+ {
+ "epoch": 0.9985974754558204,
+ "grad_norm": 14.359777239220444,
+ "learning_rate": 1.6025681083770094e-05,
+ "loss": 1.4761,
+ "step": 178
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 14.359777239220444,
+ "learning_rate": 1.5980220260273517e-05,
+ "loss": 0.4634,
+ "step": 179
+ },
+ {
+ "epoch": 1.0056100981767182,
+ "grad_norm": 14.690822186999577,
+ "learning_rate": 1.5934575543490827e-05,
+ "loss": 1.7605,
+ "step": 180
+ },
+ {
+ "epoch": 1.0112201963534362,
+ "grad_norm": 19.381128219060844,
+ "learning_rate": 1.588874861884084e-05,
+ "loss": 1.3774,
+ "step": 181
+ },
+ {
+ "epoch": 1.0168302945301542,
+ "grad_norm": 13.764508415825427,
+ "learning_rate": 1.5842741178470356e-05,
+ "loss": 1.3739,
+ "step": 182
+ },
+ {
+ "epoch": 1.0224403927068724,
+ "grad_norm": 16.824436408164395,
+ "learning_rate": 1.5796554921191666e-05,
+ "loss": 1.4756,
+ "step": 183
+ },
+ {
+ "epoch": 1.0280504908835906,
+ "grad_norm": 10.377967714732288,
+ "learning_rate": 1.5750191552419834e-05,
+ "loss": 1.2113,
+ "step": 184
+ },
+ {
+ "epoch": 1.0336605890603086,
+ "grad_norm": 7.564193930068618,
+ "learning_rate": 1.5703652784109704e-05,
+ "loss": 1.1498,
+ "step": 185
+ },
+ {
+ "epoch": 1.0392706872370265,
+ "grad_norm": 11.965238823644222,
+ "learning_rate": 1.565694033469271e-05,
+ "loss": 1.5756,
+ "step": 186
+ },
+ {
+ "epoch": 1.0448807854137447,
+ "grad_norm": 14.689919910120953,
+ "learning_rate": 1.5610055929013422e-05,
+ "loss": 1.4575,
+ "step": 187
+ },
+ {
+ "epoch": 1.050490883590463,
+ "grad_norm": 22.539021928944944,
+ "learning_rate": 1.5563001298265826e-05,
+ "loss": 1.4551,
+ "step": 188
+ },
+ {
+ "epoch": 1.056100981767181,
+ "grad_norm": 19.696373241342833,
+ "learning_rate": 1.5515778179929444e-05,
+ "loss": 1.6987,
+ "step": 189
+ },
+ {
+ "epoch": 1.061711079943899,
+ "grad_norm": 12.848174310076084,
+ "learning_rate": 1.5468388317705145e-05,
+ "loss": 1.4525,
+ "step": 190
+ },
+ {
+ "epoch": 1.067321178120617,
+ "grad_norm": 15.431121772973855,
+ "learning_rate": 1.5420833461450773e-05,
+ "loss": 1.5082,
+ "step": 191
+ },
+ {
+ "epoch": 1.0729312762973353,
+ "grad_norm": 16.165697573890178,
+ "learning_rate": 1.5373115367116532e-05,
+ "loss": 1.5569,
+ "step": 192
+ },
+ {
+ "epoch": 1.0785413744740533,
+ "grad_norm": 11.167625826904903,
+ "learning_rate": 1.5325235796680155e-05,
+ "loss": 1.4128,
+ "step": 193
+ },
+ {
+ "epoch": 1.0841514726507713,
+ "grad_norm": 14.862256623967552,
+ "learning_rate": 1.5277196518081826e-05,
+ "loss": 1.4904,
+ "step": 194
+ },
+ {
+ "epoch": 1.0897615708274895,
+ "grad_norm": 14.141157026149255,
+ "learning_rate": 1.5228999305158915e-05,
+ "loss": 1.42,
+ "step": 195
+ },
+ {
+ "epoch": 1.0953716690042077,
+ "grad_norm": 25.352183474376506,
+ "learning_rate": 1.5180645937580474e-05,
+ "loss": 1.4163,
+ "step": 196
+ },
+ {
+ "epoch": 1.1009817671809257,
+ "grad_norm": 10.055312720961254,
+ "learning_rate": 1.5132138200781523e-05,
+ "loss": 1.0222,
+ "step": 197
+ },
+ {
+ "epoch": 1.1065918653576436,
+ "grad_norm": 13.796900933925889,
+ "learning_rate": 1.5083477885897122e-05,
+ "loss": 1.759,
+ "step": 198
+ },
+ {
+ "epoch": 1.1122019635343618,
+ "grad_norm": 12.433488935186917,
+ "learning_rate": 1.5034666789696239e-05,
+ "loss": 1.3977,
+ "step": 199
+ },
+ {
+ "epoch": 1.11781206171108,
+ "grad_norm": 18.535695256124203,
+ "learning_rate": 1.4985706714515394e-05,
+ "loss": 1.6248,
+ "step": 200
+ },
+ {
+ "epoch": 1.123422159887798,
+ "grad_norm": 12.996630182675128,
+ "learning_rate": 1.4936599468192128e-05,
+ "loss": 1.5048,
+ "step": 201
+ },
+ {
+ "epoch": 1.129032258064516,
+ "grad_norm": 10.587914458016622,
+ "learning_rate": 1.4887346863998226e-05,
+ "loss": 1.3652,
+ "step": 202
+ },
+ {
+ "epoch": 1.1346423562412342,
+ "grad_norm": 10.705843179552653,
+ "learning_rate": 1.4837950720572777e-05,
+ "loss": 1.3677,
+ "step": 203
+ },
+ {
+ "epoch": 1.1402524544179524,
+ "grad_norm": 9.860740526338978,
+ "learning_rate": 1.4788412861855018e-05,
+ "loss": 1.4839,
+ "step": 204
+ },
+ {
+ "epoch": 1.1458625525946704,
+ "grad_norm": 46.04382771539066,
+ "learning_rate": 1.473873511701698e-05,
+ "loss": 1.6699,
+ "step": 205
+ },
+ {
+ "epoch": 1.1514726507713884,
+ "grad_norm": 21.90608450068067,
+ "learning_rate": 1.4688919320395958e-05,
+ "loss": 1.5415,
+ "step": 206
+ },
+ {
+ "epoch": 1.1570827489481066,
+ "grad_norm": 11.517380135013592,
+ "learning_rate": 1.4638967311426769e-05,
+ "loss": 1.4757,
+ "step": 207
+ },
+ {
+ "epoch": 1.1626928471248248,
+ "grad_norm": 22.45752042141559,
+ "learning_rate": 1.4588880934573832e-05,
+ "loss": 1.1128,
+ "step": 208
+ },
+ {
+ "epoch": 1.1683029453015428,
+ "grad_norm": 15.859260925124287,
+ "learning_rate": 1.4538662039263067e-05,
+ "loss": 1.3533,
+ "step": 209
+ },
+ {
+ "epoch": 1.1739130434782608,
+ "grad_norm": 11.911198424099664,
+ "learning_rate": 1.4488312479813598e-05,
+ "loss": 1.5547,
+ "step": 210
+ },
+ {
+ "epoch": 1.179523141654979,
+ "grad_norm": 15.032622370869328,
+ "learning_rate": 1.4437834115369293e-05,
+ "loss": 1.5527,
+ "step": 211
+ },
+ {
+ "epoch": 1.1851332398316972,
+ "grad_norm": 13.163219102851839,
+ "learning_rate": 1.4387228809830107e-05,
+ "loss": 1.4507,
+ "step": 212
+ },
+ {
+ "epoch": 1.1907433380084151,
+ "grad_norm": 11.55782752088526,
+ "learning_rate": 1.4336498431783255e-05,
+ "loss": 1.5752,
+ "step": 213
+ },
+ {
+ "epoch": 1.1963534361851331,
+ "grad_norm": 8.939604195134024,
+ "learning_rate": 1.428564485443423e-05,
+ "loss": 1.2458,
+ "step": 214
+ },
+ {
+ "epoch": 1.2019635343618513,
+ "grad_norm": 13.076558990973293,
+ "learning_rate": 1.4234669955537615e-05,
+ "loss": 1.5947,
+ "step": 215
+ },
+ {
+ "epoch": 1.2075736325385695,
+ "grad_norm": 29.61425820551926,
+ "learning_rate": 1.4183575617327774e-05,
+ "loss": 1.4435,
+ "step": 216
+ },
+ {
+ "epoch": 1.2131837307152875,
+ "grad_norm": 17.139513231632446,
+ "learning_rate": 1.413236372644932e-05,
+ "loss": 1.5416,
+ "step": 217
+ },
+ {
+ "epoch": 1.2187938288920055,
+ "grad_norm": 11.538723658632415,
+ "learning_rate": 1.4081036173887473e-05,
+ "loss": 1.2324,
+ "step": 218
+ },
+ {
+ "epoch": 1.2244039270687237,
+ "grad_norm": 11.952863288381689,
+ "learning_rate": 1.402959485489823e-05,
+ "loss": 1.509,
+ "step": 219
+ },
+ {
+ "epoch": 1.230014025245442,
+ "grad_norm": 19.47240211007032,
+ "learning_rate": 1.3978041668938383e-05,
+ "loss": 1.6639,
+ "step": 220
+ },
+ {
+ "epoch": 1.2356241234221599,
+ "grad_norm": 15.07334914756095,
+ "learning_rate": 1.3926378519595374e-05,
+ "loss": 1.7645,
+ "step": 221
+ },
+ {
+ "epoch": 1.2412342215988779,
+ "grad_norm": 22.910378041692734,
+ "learning_rate": 1.3874607314517021e-05,
+ "loss": 1.5825,
+ "step": 222
+ },
+ {
+ "epoch": 1.246844319775596,
+ "grad_norm": 15.057996546883961,
+ "learning_rate": 1.3822729965341078e-05,
+ "loss": 1.2952,
+ "step": 223
+ },
+ {
+ "epoch": 1.2524544179523143,
+ "grad_norm": 10.933853220414331,
+ "learning_rate": 1.3770748387624621e-05,
+ "loss": 1.292,
+ "step": 224
+ },
+ {
+ "epoch": 1.2580645161290323,
+ "grad_norm": 12.1607776522528,
+ "learning_rate": 1.3718664500773354e-05,
+ "loss": 1.3202,
+ "step": 225
+ },
+ {
+ "epoch": 1.2636746143057502,
+ "grad_norm": 11.360620610919614,
+ "learning_rate": 1.3666480227970709e-05,
+ "loss": 1.5078,
+ "step": 226
+ },
+ {
+ "epoch": 1.2692847124824684,
+ "grad_norm": 9.312895139596053,
+ "learning_rate": 1.3614197496106844e-05,
+ "loss": 1.2955,
+ "step": 227
+ },
+ {
+ "epoch": 1.2748948106591866,
+ "grad_norm": 22.942061123573257,
+ "learning_rate": 1.3561818235707496e-05,
+ "loss": 1.278,
+ "step": 228
+ },
+ {
+ "epoch": 1.2805049088359046,
+ "grad_norm": 18.018595754740154,
+ "learning_rate": 1.3509344380862686e-05,
+ "loss": 1.5696,
+ "step": 229
+ },
+ {
+ "epoch": 1.2861150070126226,
+ "grad_norm": 11.395912452185224,
+ "learning_rate": 1.3456777869155313e-05,
+ "loss": 1.532,
+ "step": 230
+ },
+ {
+ "epoch": 1.2917251051893408,
+ "grad_norm": 11.638674568201234,
+ "learning_rate": 1.3404120641589603e-05,
+ "loss": 1.811,
+ "step": 231
+ },
+ {
+ "epoch": 1.297335203366059,
+ "grad_norm": 15.157516555268536,
+ "learning_rate": 1.3351374642519442e-05,
+ "loss": 1.5383,
+ "step": 232
+ },
+ {
+ "epoch": 1.302945301542777,
+ "grad_norm": 15.58095439499807,
+ "learning_rate": 1.3298541819576576e-05,
+ "loss": 1.37,
+ "step": 233
+ },
+ {
+ "epoch": 1.308555399719495,
+ "grad_norm": 35.4984649494009,
+ "learning_rate": 1.324562412359871e-05,
+ "loss": 1.4839,
+ "step": 234
+ },
+ {
+ "epoch": 1.3141654978962132,
+ "grad_norm": 10.350141828953962,
+ "learning_rate": 1.3192623508557451e-05,
+ "loss": 1.2646,
+ "step": 235
+ },
+ {
+ "epoch": 1.3197755960729314,
+ "grad_norm": 12.999057294234326,
+ "learning_rate": 1.3139541931486173e-05,
+ "loss": 1.5151,
+ "step": 236
+ },
+ {
+ "epoch": 1.3253856942496494,
+ "grad_norm": 17.4509215245749,
+ "learning_rate": 1.3086381352407755e-05,
+ "loss": 1.3827,
+ "step": 237
+ },
+ {
+ "epoch": 1.3309957924263673,
+ "grad_norm": 11.463089763795967,
+ "learning_rate": 1.3033143734262204e-05,
+ "loss": 1.1956,
+ "step": 238
+ },
+ {
+ "epoch": 1.3366058906030855,
+ "grad_norm": 168.91450904514727,
+ "learning_rate": 1.2979831042834164e-05,
+ "loss": 1.4713,
+ "step": 239
+ },
+ {
+ "epoch": 1.3422159887798037,
+ "grad_norm": 11.567925095640721,
+ "learning_rate": 1.2926445246680348e-05,
+ "loss": 1.2264,
+ "step": 240
+ },
+ {
+ "epoch": 1.3478260869565217,
+ "grad_norm": 10.54147800166849,
+ "learning_rate": 1.2872988317056845e-05,
+ "loss": 1.5958,
+ "step": 241
+ },
+ {
+ "epoch": 1.3534361851332397,
+ "grad_norm": 21.074707300205976,
+ "learning_rate": 1.2819462227846317e-05,
+ "loss": 1.2939,
+ "step": 242
+ },
+ {
+ "epoch": 1.359046283309958,
+ "grad_norm": 13.48881127446839,
+ "learning_rate": 1.2765868955485137e-05,
+ "loss": 1.5187,
+ "step": 243
+ },
+ {
+ "epoch": 1.3646563814866761,
+ "grad_norm": 10.154890818774664,
+ "learning_rate": 1.2712210478890383e-05,
+ "loss": 1.5038,
+ "step": 244
+ },
+ {
+ "epoch": 1.370266479663394,
+ "grad_norm": 18.469132063362895,
+ "learning_rate": 1.26584887793868e-05,
+ "loss": 1.6212,
+ "step": 245
+ },
+ {
+ "epoch": 1.375876577840112,
+ "grad_norm": 12.225168956623056,
+ "learning_rate": 1.2604705840633607e-05,
+ "loss": 1.3629,
+ "step": 246
+ },
+ {
+ "epoch": 1.3814866760168303,
+ "grad_norm": 17.894894417221984,
+ "learning_rate": 1.255086364855127e-05,
+ "loss": 1.3347,
+ "step": 247
+ },
+ {
+ "epoch": 1.3870967741935485,
+ "grad_norm": 14.962485611796625,
+ "learning_rate": 1.2496964191248173e-05,
+ "loss": 1.5094,
+ "step": 248
+ },
+ {
+ "epoch": 1.3927068723702665,
+ "grad_norm": 18.88682118132516,
+ "learning_rate": 1.244300945894719e-05,
+ "loss": 1.3175,
+ "step": 249
+ },
+ {
+ "epoch": 1.3983169705469845,
+ "grad_norm": 18.655608529046333,
+ "learning_rate": 1.2389001443912231e-05,
+ "loss": 1.5244,
+ "step": 250
+ },
+ {
+ "epoch": 1.4039270687237027,
+ "grad_norm": 10.075456800604965,
+ "learning_rate": 1.2334942140374634e-05,
+ "loss": 1.3944,
+ "step": 251
+ },
+ {
+ "epoch": 1.4095371669004209,
+ "grad_norm": 15.028865589899354,
+ "learning_rate": 1.228083354445957e-05,
+ "loss": 1.3883,
+ "step": 252
+ },
+ {
+ "epoch": 1.4151472650771388,
+ "grad_norm": 42.583340154253584,
+ "learning_rate": 1.2226677654112296e-05,
+ "loss": 1.5854,
+ "step": 253
+ },
+ {
+ "epoch": 1.4207573632538568,
+ "grad_norm": 10.056718092820645,
+ "learning_rate": 1.2172476469024422e-05,
+ "loss": 1.6826,
+ "step": 254
+ },
+ {
+ "epoch": 1.426367461430575,
+ "grad_norm": 46.13023482300682,
+ "learning_rate": 1.211823199056005e-05,
+ "loss": 1.3374,
+ "step": 255
+ },
+ {
+ "epoch": 1.4319775596072932,
+ "grad_norm": 10.072978707803287,
+ "learning_rate": 1.2063946221681872e-05,
+ "loss": 1.428,
+ "step": 256
+ },
+ {
+ "epoch": 1.4375876577840112,
+ "grad_norm": 11.02574282037852,
+ "learning_rate": 1.2009621166877224e-05,
+ "loss": 1.3809,
+ "step": 257
+ },
+ {
+ "epoch": 1.4431977559607292,
+ "grad_norm": 9.564143846905477,
+ "learning_rate": 1.1955258832084058e-05,
+ "loss": 1.4093,
+ "step": 258
+ },
+ {
+ "epoch": 1.4488078541374474,
+ "grad_norm": 8.930878694659077,
+ "learning_rate": 1.1900861224616888e-05,
+ "loss": 1.5132,
+ "step": 259
+ },
+ {
+ "epoch": 1.4544179523141656,
+ "grad_norm": 67.29834215473018,
+ "learning_rate": 1.1846430353092653e-05,
+ "loss": 1.5392,
+ "step": 260
+ },
+ {
+ "epoch": 1.4600280504908836,
+ "grad_norm": 11.575055969929595,
+ "learning_rate": 1.1791968227356563e-05,
+ "loss": 1.1486,
+ "step": 261
+ },
+ {
+ "epoch": 1.4656381486676016,
+ "grad_norm": 19.633803271594928,
+ "learning_rate": 1.173747685840788e-05,
+ "loss": 1.7273,
+ "step": 262
+ },
+ {
+ "epoch": 1.4712482468443198,
+ "grad_norm": 15.14394386124325,
+ "learning_rate": 1.168295825832566e-05,
+ "loss": 1.5624,
+ "step": 263
+ },
+ {
+ "epoch": 1.476858345021038,
+ "grad_norm": 10.448296386160346,
+ "learning_rate": 1.1628414440194475e-05,
+ "loss": 1.6643,
+ "step": 264
+ },
+ {
+ "epoch": 1.482468443197756,
+ "grad_norm": 10.794099359683328,
+ "learning_rate": 1.1573847418030053e-05,
+ "loss": 1.5638,
+ "step": 265
+ },
+ {
+ "epoch": 1.488078541374474,
+ "grad_norm": 14.889289307369161,
+ "learning_rate": 1.151925920670493e-05,
+ "loss": 1.4022,
+ "step": 266
+ },
+ {
+ "epoch": 1.4936886395511921,
+ "grad_norm": 13.098686565529942,
+ "learning_rate": 1.1464651821874037e-05,
+ "loss": 1.5339,
+ "step": 267
+ },
+ {
+ "epoch": 1.4992987377279103,
+ "grad_norm": 22.334352986107007,
+ "learning_rate": 1.1410027279900297e-05,
+ "loss": 1.4507,
+ "step": 268
+ },
+ {
+ "epoch": 1.5049088359046283,
+ "grad_norm": 25.84919623189595,
+ "learning_rate": 1.1355387597780147e-05,
+ "loss": 1.7249,
+ "step": 269
+ },
+ {
+ "epoch": 1.5105189340813463,
+ "grad_norm": 15.08636605686211,
+ "learning_rate": 1.1300734793069073e-05,
+ "loss": 1.6036,
+ "step": 270
+ },
+ {
+ "epoch": 1.5161290322580645,
+ "grad_norm": 9.556192428160092,
+ "learning_rate": 1.1246070883807103e-05,
+ "loss": 1.6183,
+ "step": 271
+ },
+ {
+ "epoch": 1.5217391304347827,
+ "grad_norm": 9.945367086120191,
+ "learning_rate": 1.1191397888444303e-05,
+ "loss": 1.4395,
+ "step": 272
+ },
+ {
+ "epoch": 1.5273492286115007,
+ "grad_norm": 11.691573124439154,
+ "learning_rate": 1.1136717825766236e-05,
+ "loss": 1.3199,
+ "step": 273
+ },
+ {
+ "epoch": 1.5329593267882187,
+ "grad_norm": 15.728068778759177,
+ "learning_rate": 1.1082032714819435e-05,
+ "loss": 1.4037,
+ "step": 274
+ },
+ {
+ "epoch": 1.5385694249649369,
+ "grad_norm": 35.24549151590082,
+ "learning_rate": 1.1027344574836825e-05,
+ "loss": 1.5653,
+ "step": 275
+ },
+ {
+ "epoch": 1.544179523141655,
+ "grad_norm": 16.731762943741455,
+ "learning_rate": 1.0972655425163183e-05,
+ "loss": 1.5923,
+ "step": 276
+ },
+ {
+ "epoch": 1.549789621318373,
+ "grad_norm": 12.833069361335747,
+ "learning_rate": 1.0917967285180571e-05,
+ "loss": 1.5142,
+ "step": 277
+ },
+ {
+ "epoch": 1.555399719495091,
+ "grad_norm": 19.33521264857588,
+ "learning_rate": 1.0863282174233766e-05,
+ "loss": 1.3127,
+ "step": 278
+ },
+ {
+ "epoch": 1.5610098176718092,
+ "grad_norm": 10.886910197548698,
+ "learning_rate": 1.0808602111555702e-05,
+ "loss": 1.6208,
+ "step": 279
+ },
+ {
+ "epoch": 1.5666199158485274,
+ "grad_norm": 12.801692217446645,
+ "learning_rate": 1.07539291161929e-05,
+ "loss": 1.3401,
+ "step": 280
+ },
+ {
+ "epoch": 1.5722300140252454,
+ "grad_norm": 17.836580126577534,
+ "learning_rate": 1.0699265206930934e-05,
+ "loss": 1.5505,
+ "step": 281
+ },
+ {
+ "epoch": 1.5778401122019634,
+ "grad_norm": 17.357322700028668,
+ "learning_rate": 1.0644612402219854e-05,
+ "loss": 1.698,
+ "step": 282
+ },
+ {
+ "epoch": 1.5834502103786816,
+ "grad_norm": 15.409331750453493,
+ "learning_rate": 1.0589972720099704e-05,
+ "loss": 1.5117,
+ "step": 283
+ },
+ {
+ "epoch": 1.5890603085553998,
+ "grad_norm": 11.164793017120967,
+ "learning_rate": 1.0535348178125965e-05,
+ "loss": 1.5952,
+ "step": 284
+ },
+ {
+ "epoch": 1.5946704067321178,
+ "grad_norm": 19.906733367495576,
+ "learning_rate": 1.0480740793295074e-05,
+ "loss": 1.3231,
+ "step": 285
+ },
+ {
+ "epoch": 1.6002805049088358,
+ "grad_norm": 9.391700103965178,
+ "learning_rate": 1.042615258196995e-05,
+ "loss": 1.5088,
+ "step": 286
+ },
+ {
+ "epoch": 1.605890603085554,
+ "grad_norm": 11.364457340008288,
+ "learning_rate": 1.0371585559805528e-05,
+ "loss": 1.2463,
+ "step": 287
+ },
+ {
+ "epoch": 1.6115007012622722,
+ "grad_norm": 14.723407501563633,
+ "learning_rate": 1.0317041741674341e-05,
+ "loss": 1.4825,
+ "step": 288
+ },
+ {
+ "epoch": 1.6171107994389902,
+ "grad_norm": 12.584544998433199,
+ "learning_rate": 1.0262523141592126e-05,
+ "loss": 1.8245,
+ "step": 289
+ },
+ {
+ "epoch": 1.6227208976157081,
+ "grad_norm": 13.733073782677504,
+ "learning_rate": 1.0208031772643443e-05,
+ "loss": 1.4269,
+ "step": 290
+ },
+ {
+ "epoch": 1.6283309957924264,
+ "grad_norm": 14.624299291370347,
+ "learning_rate": 1.0153569646907355e-05,
+ "loss": 1.3492,
+ "step": 291
+ },
+ {
+ "epoch": 1.6339410939691446,
+ "grad_norm": 12.581946392558763,
+ "learning_rate": 1.0099138775383115e-05,
+ "loss": 1.4458,
+ "step": 292
+ },
+ {
+ "epoch": 1.6395511921458625,
+ "grad_norm": 14.444232112139593,
+ "learning_rate": 1.0044741167915946e-05,
+ "loss": 1.3966,
+ "step": 293
+ },
+ {
+ "epoch": 1.6451612903225805,
+ "grad_norm": 19.357239455692902,
+ "learning_rate": 9.990378833122782e-06,
+ "loss": 1.3954,
+ "step": 294
+ },
+ {
+ "epoch": 1.6507713884992987,
+ "grad_norm": 16.39319341444051,
+ "learning_rate": 9.93605377831813e-06,
+ "loss": 1.4255,
+ "step": 295
+ },
+ {
+ "epoch": 1.656381486676017,
+ "grad_norm": 30.389343705342387,
+ "learning_rate": 9.881768009439952e-06,
+ "loss": 1.3105,
+ "step": 296
+ },
+ {
+ "epoch": 1.661991584852735,
+ "grad_norm": 10.78767731128213,
+ "learning_rate": 9.82752353097558e-06,
+ "loss": 1.5082,
+ "step": 297
+ },
+ {
+ "epoch": 1.6676016830294529,
+ "grad_norm": 10.851960829756253,
+ "learning_rate": 9.773322345887705e-06,
+ "loss": 1.3302,
+ "step": 298
+ },
+ {
+ "epoch": 1.673211781206171,
+ "grad_norm": 9.059627340481349,
+ "learning_rate": 9.719166455540437e-06,
+ "loss": 1.428,
+ "step": 299
+ },
+ {
+ "epoch": 1.6788218793828893,
+ "grad_norm": 11.056590609678853,
+ "learning_rate": 9.665057859625367e-06,
+ "loss": 1.4933,
+ "step": 300
+ },
+ {
+ "epoch": 1.6844319775596073,
+ "grad_norm": 18.000797951799324,
+ "learning_rate": 9.61099855608777e-06,
+ "loss": 1.3368,
+ "step": 301
+ },
+ {
+ "epoch": 1.6900420757363253,
+ "grad_norm": 12.711029397236132,
+ "learning_rate": 9.556990541052811e-06,
+ "loss": 1.5315,
+ "step": 302
+ },
+ {
+ "epoch": 1.6956521739130435,
+ "grad_norm": 16.660350616242294,
+ "learning_rate": 9.50303580875183e-06,
+ "loss": 1.5576,
+ "step": 303
+ },
+ {
+ "epoch": 1.7012622720897617,
+ "grad_norm": 14.18477102640596,
+ "learning_rate": 9.449136351448733e-06,
+ "loss": 1.4395,
+ "step": 304
+ },
+ {
+ "epoch": 1.7068723702664796,
+ "grad_norm": 16.66543319989344,
+ "learning_rate": 9.395294159366398e-06,
+ "loss": 1.3333,
+ "step": 305
+ },
+ {
+ "epoch": 1.7124824684431976,
+ "grad_norm": 12.550767222637742,
+ "learning_rate": 9.341511220613203e-06,
+ "loss": 1.4948,
+ "step": 306
+ },
+ {
+ "epoch": 1.7180925666199158,
+ "grad_norm": 23.14843194571176,
+ "learning_rate": 9.287789521109619e-06,
+ "loss": 1.3759,
+ "step": 307
+ },
+ {
+ "epoch": 1.723702664796634,
+ "grad_norm": 10.179110221624262,
+ "learning_rate": 9.234131044514866e-06,
+ "loss": 1.4955,
+ "step": 308
+ },
+ {
+ "epoch": 1.729312762973352,
+ "grad_norm": 11.671550250237754,
+ "learning_rate": 9.180537772153689e-06,
+ "loss": 1.5044,
+ "step": 309
+ },
+ {
+ "epoch": 1.73492286115007,
+ "grad_norm": 18.158491953419958,
+ "learning_rate": 9.12701168294316e-06,
+ "loss": 1.5487,
+ "step": 310
+ },
+ {
+ "epoch": 1.7405329593267882,
+ "grad_norm": 16.378480991727116,
+ "learning_rate": 9.073554753319653e-06,
+ "loss": 1.6555,
+ "step": 311
+ },
+ {
+ "epoch": 1.7461430575035064,
+ "grad_norm": 14.612627221953167,
+ "learning_rate": 9.020168957165843e-06,
+ "loss": 1.3901,
+ "step": 312
+ },
+ {
+ "epoch": 1.7517531556802244,
+ "grad_norm": 22.199668296254806,
+ "learning_rate": 8.9668562657378e-06,
+ "loss": 1.2821,
+ "step": 313
+ },
+ {
+ "epoch": 1.7573632538569424,
+ "grad_norm": 48.85051417469821,
+ "learning_rate": 8.913618647592246e-06,
+ "loss": 1.4017,
+ "step": 314
+ },
+ {
+ "epoch": 1.7629733520336606,
+ "grad_norm": 11.525812973776294,
+ "learning_rate": 8.860458068513831e-06,
+ "loss": 1.405,
+ "step": 315
+ },
+ {
+ "epoch": 1.7685834502103788,
+ "grad_norm": 9.926236369384918,
+ "learning_rate": 8.807376491442552e-06,
+ "loss": 1.498,
+ "step": 316
+ },
+ {
+ "epoch": 1.7741935483870968,
+ "grad_norm": 12.644238947963776,
+ "learning_rate": 8.754375876401296e-06,
+ "loss": 1.5187,
+ "step": 317
+ },
+ {
+ "epoch": 1.7798036465638147,
+ "grad_norm": 17.063941988263224,
+ "learning_rate": 8.701458180423425e-06,
+ "loss": 1.4912,
+ "step": 318
+ },
+ {
+ "epoch": 1.785413744740533,
+ "grad_norm": 21.892751402938607,
+ "learning_rate": 8.648625357480563e-06,
+ "loss": 1.6582,
+ "step": 319
+ },
+ {
+ "epoch": 1.7910238429172511,
+ "grad_norm": 11.579396690290965,
+ "learning_rate": 8.595879358410402e-06,
+ "loss": 1.2974,
+ "step": 320
+ },
+ {
+ "epoch": 1.7966339410939691,
+ "grad_norm": 7.407872465259077,
+ "learning_rate": 8.543222130844693e-06,
+ "loss": 1.3936,
+ "step": 321
+ },
+ {
+ "epoch": 1.802244039270687,
+ "grad_norm": 13.541093138926195,
+ "learning_rate": 8.490655619137318e-06,
+ "loss": 1.4678,
+ "step": 322
+ },
+ {
+ "epoch": 1.8078541374474053,
+ "grad_norm": 13.169175604119717,
+ "learning_rate": 8.438181764292509e-06,
+ "loss": 1.4833,
+ "step": 323
+ },
+ {
+ "epoch": 1.8134642356241235,
+ "grad_norm": 9.29297616022089,
+ "learning_rate": 8.385802503893159e-06,
+ "loss": 1.3668,
+ "step": 324
+ },
+ {
+ "epoch": 1.8190743338008415,
+ "grad_norm": 21.69771394920528,
+ "learning_rate": 8.333519772029297e-06,
+ "loss": 1.5304,
+ "step": 325
+ },
+ {
+ "epoch": 1.8246844319775595,
+ "grad_norm": 8.695597330254802,
+ "learning_rate": 8.28133549922665e-06,
+ "loss": 1.183,
+ "step": 326
+ },
+ {
+ "epoch": 1.8302945301542777,
+ "grad_norm": 11.450047920423891,
+ "learning_rate": 8.229251612375383e-06,
+ "loss": 1.0692,
+ "step": 327
+ },
+ {
+ "epoch": 1.8359046283309959,
+ "grad_norm": 15.564245276964593,
+ "learning_rate": 8.177270034658926e-06,
+ "loss": 1.3317,
+ "step": 328
+ },
+ {
+ "epoch": 1.8415147265077139,
+ "grad_norm": 12.643991254106773,
+ "learning_rate": 8.12539268548298e-06,
+ "loss": 1.1766,
+ "step": 329
+ },
+ {
+ "epoch": 1.8471248246844318,
+ "grad_norm": 13.666387299973383,
+ "learning_rate": 8.073621480404632e-06,
+ "loss": 1.4672,
+ "step": 330
+ },
+ {
+ "epoch": 1.85273492286115,
+ "grad_norm": 15.118851784764775,
+ "learning_rate": 8.021958331061622e-06,
+ "loss": 1.5273,
+ "step": 331
+ },
+ {
+ "epoch": 1.8583450210378682,
+ "grad_norm": 14.20758535775456,
+ "learning_rate": 7.97040514510177e-06,
+ "loss": 1.4684,
+ "step": 332
+ },
+ {
+ "epoch": 1.8639551192145862,
+ "grad_norm": 10.973607901863156,
+ "learning_rate": 7.91896382611253e-06,
+ "loss": 1.4965,
+ "step": 333
+ },
+ {
+ "epoch": 1.8695652173913042,
+ "grad_norm": 14.247686399471148,
+ "learning_rate": 7.86763627355068e-06,
+ "loss": 1.2917,
+ "step": 334
+ },
+ {
+ "epoch": 1.8751753155680224,
+ "grad_norm": 21.50390844260042,
+ "learning_rate": 7.816424382672229e-06,
+ "loss": 1.562,
+ "step": 335
+ },
+ {
+ "epoch": 1.8807854137447406,
+ "grad_norm": 11.819217383633273,
+ "learning_rate": 7.765330044462386e-06,
+ "loss": 1.4822,
+ "step": 336
+ },
+ {
+ "epoch": 1.8863955119214586,
+ "grad_norm": 14.161357008398616,
+ "learning_rate": 7.714355145565774e-06,
+ "loss": 1.6368,
+ "step": 337
+ },
+ {
+ "epoch": 1.8920056100981766,
+ "grad_norm": 16.56545755056043,
+ "learning_rate": 7.663501568216748e-06,
+ "loss": 1.3914,
+ "step": 338
+ },
+ {
+ "epoch": 1.8976157082748948,
+ "grad_norm": 14.791392493152658,
+ "learning_rate": 7.612771190169896e-06,
+ "loss": 1.5294,
+ "step": 339
+ },
+ {
+ "epoch": 1.903225806451613,
+ "grad_norm": 16.282820183914083,
+ "learning_rate": 7.56216588463071e-06,
+ "loss": 1.4449,
+ "step": 340
+ },
+ {
+ "epoch": 1.908835904628331,
+ "grad_norm": 24.6122748043118,
+ "learning_rate": 7.511687520186404e-06,
+ "loss": 1.5496,
+ "step": 341
+ },
+ {
+ "epoch": 1.914446002805049,
+ "grad_norm": 9.878894269958142,
+ "learning_rate": 7.461337960736936e-06,
+ "loss": 1.3899,
+ "step": 342
+ },
+ {
+ "epoch": 1.9200561009817672,
+ "grad_norm": 17.380558867917127,
+ "learning_rate": 7.411119065426174e-06,
+ "loss": 1.2366,
+ "step": 343
+ },
+ {
+ "epoch": 1.9256661991584854,
+ "grad_norm": 11.312914459477994,
+ "learning_rate": 7.361032688573235e-06,
+ "loss": 1.4102,
+ "step": 344
+ },
+ {
+ "epoch": 1.9312762973352033,
+ "grad_norm": 103.5132378549981,
+ "learning_rate": 7.311080679604048e-06,
+ "loss": 1.5552,
+ "step": 345
+ },
+ {
+ "epoch": 1.9368863955119213,
+ "grad_norm": 11.141721273139423,
+ "learning_rate": 7.261264882983024e-06,
+ "loss": 1.2666,
+ "step": 346
+ },
+ {
+ "epoch": 1.9424964936886395,
+ "grad_norm": 13.191713261655808,
+ "learning_rate": 7.211587138144986e-06,
+ "loss": 1.3972,
+ "step": 347
+ },
+ {
+ "epoch": 1.9481065918653577,
+ "grad_norm": 10.217614055594426,
+ "learning_rate": 7.162049279427228e-06,
+ "loss": 1.4429,
+ "step": 348
+ },
+ {
+ "epoch": 1.9537166900420757,
+ "grad_norm": 29.313597844076863,
+ "learning_rate": 7.112653136001777e-06,
+ "loss": 1.3011,
+ "step": 349
+ },
+ {
+ "epoch": 1.9593267882187937,
+ "grad_norm": 13.292763277903367,
+ "learning_rate": 7.063400531807873e-06,
+ "loss": 1.5477,
+ "step": 350
+ },
+ {
+ "epoch": 1.964936886395512,
+ "grad_norm": 13.900844713934955,
+ "learning_rate": 7.0142932854846104e-06,
+ "loss": 1.5371,
+ "step": 351
+ },
+ {
+ "epoch": 1.97054698457223,
+ "grad_norm": 9.80007860324782,
+ "learning_rate": 6.965333210303764e-06,
+ "loss": 1.4814,
+ "step": 352
+ },
+ {
+ "epoch": 1.976157082748948,
+ "grad_norm": 31.719102324919326,
+ "learning_rate": 6.9165221141028825e-06,
+ "loss": 1.5109,
+ "step": 353
+ },
+ {
+ "epoch": 1.981767180925666,
+ "grad_norm": 41.16300740394282,
+ "learning_rate": 6.8678617992184785e-06,
+ "loss": 1.3093,
+ "step": 354
+ },
+ {
+ "epoch": 1.9873772791023843,
+ "grad_norm": 14.381052416660255,
+ "learning_rate": 6.819354062419525e-06,
+ "loss": 1.4739,
+ "step": 355
+ },
+ {
+ "epoch": 1.9929873772791025,
+ "grad_norm": 18.327960590810015,
+ "learning_rate": 6.771000694841085e-06,
+ "loss": 1.4016,
+ "step": 356
+ },
+ {
+ "epoch": 1.9985974754558204,
+ "grad_norm": 23.842506596521428,
+ "learning_rate": 6.722803481918174e-06,
+ "loss": 1.3469,
+ "step": 357
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 23.842506596521428,
+ "learning_rate": 6.674764203319847e-06,
+ "loss": 0.4292,
+ "step": 358
+ },
+ {
+ "epoch": 2.005610098176718,
+ "grad_norm": 62.91644908073718,
+ "learning_rate": 6.626884632883468e-06,
+ "loss": 1.3894,
+ "step": 359
+ },
+ {
+ "epoch": 2.0112201963534364,
+ "grad_norm": 17.385006546849574,
+ "learning_rate": 6.579166538549232e-06,
+ "loss": 1.396,
+ "step": 360
2531
+ },
2532
+ {
2533
+ "epoch": 2.016830294530154,
2534
+ "grad_norm": 11.14616894465407,
2535
+ "learning_rate": 6.531611682294862e-06,
2536
+ "loss": 1.3324,
2537
+ "step": 361
2538
+ },
2539
+ {
2540
+ "epoch": 2.0224403927068724,
2541
+ "grad_norm": 13.234762237299156,
2542
+ "learning_rate": 6.48422182007056e-06,
2543
+ "loss": 1.5692,
2544
+ "step": 362
2545
+ },
2546
+ {
2547
+ "epoch": 2.0280504908835906,
2548
+ "grad_norm": 16.139630982243954,
2549
+ "learning_rate": 6.436998701734178e-06,
2550
+ "loss": 1.5417,
2551
+ "step": 363
2552
+ },
2553
+ {
2554
+ "epoch": 2.0336605890603083,
2555
+ "grad_norm": 10.479080049599737,
2556
+ "learning_rate": 6.389944070986582e-06,
2557
+ "loss": 1.5044,
2558
+ "step": 364
2559
+ },
2560
+ {
2561
+ "epoch": 2.0392706872370265,
2562
+ "grad_norm": 9.271240066180434,
2563
+ "learning_rate": 6.343059665307288e-06,
2564
+ "loss": 1.3809,
2565
+ "step": 365
2566
+ },
2567
+ {
2568
+ "epoch": 2.0448807854137447,
2569
+ "grad_norm": 12.665084229385839,
2570
+ "learning_rate": 6.296347215890299e-06,
2571
+ "loss": 1.7156,
2572
+ "step": 366
2573
+ },
2574
+ {
2575
+ "epoch": 2.050490883590463,
2576
+ "grad_norm": 11.925013818106061,
2577
+ "learning_rate": 6.24980844758017e-06,
2578
+ "loss": 1.3325,
2579
+ "step": 367
2580
+ },
2581
+ {
2582
+ "epoch": 2.056100981767181,
2583
+ "grad_norm": 28.984453902503265,
2584
+ "learning_rate": 6.203445078808334e-06,
2585
+ "loss": 1.5709,
2586
+ "step": 368
2587
+ },
2588
+ {
2589
+ "epoch": 2.061711079943899,
2590
+ "grad_norm": 36.32322282586609,
2591
+ "learning_rate": 6.157258821529648e-06,
2592
+ "loss": 1.3057,
2593
+ "step": 369
2594
+ },
2595
+ {
2596
+ "epoch": 2.067321178120617,
2597
+ "grad_norm": 18.435527641328143,
2598
+ "learning_rate": 6.1112513811591625e-06,
2599
+ "loss": 1.4139,
2600
+ "step": 370
2601
+ },
2602
+ {
2603
+ "epoch": 2.0729312762973353,
2604
+ "grad_norm": 15.316146790050897,
2605
+ "learning_rate": 6.065424456509177e-06,
2606
+ "loss": 1.4445,
2607
+ "step": 371
2608
+ },
2609
+ {
2610
+ "epoch": 2.078541374474053,
2611
+ "grad_norm": 9.049133203538355,
2612
+ "learning_rate": 6.019779739726483e-06,
2613
+ "loss": 1.3055,
2614
+ "step": 372
2615
+ },
2616
+ {
2617
+ "epoch": 2.0841514726507713,
2618
+ "grad_norm": 24.250935935571302,
2619
+ "learning_rate": 5.974318916229909e-06,
2620
+ "loss": 1.2852,
2621
+ "step": 373
2622
+ },
2623
+ {
2624
+ "epoch": 2.0897615708274895,
2625
+ "grad_norm": 11.121746868870078,
2626
+ "learning_rate": 5.9290436646480635e-06,
2627
+ "loss": 1.3112,
2628
+ "step": 374
2629
+ },
2630
+ {
2631
+ "epoch": 2.0953716690042077,
2632
+ "grad_norm": 27.640809058263656,
2633
+ "learning_rate": 5.883955656757361e-06,
2634
+ "loss": 1.2175,
2635
+ "step": 375
2636
+ },
2637
+ {
2638
+ "epoch": 2.100981767180926,
2639
+ "grad_norm": 7.733612352649396,
2640
+ "learning_rate": 5.839056557420306e-06,
2641
+ "loss": 1.5557,
2642
+ "step": 376
2643
+ },
2644
+ {
2645
+ "epoch": 2.1065918653576436,
2646
+ "grad_norm": 18.803659684472592,
2647
+ "learning_rate": 5.794348024523991e-06,
2648
+ "loss": 1.3253,
2649
+ "step": 377
2650
+ },
2651
+ {
2652
+ "epoch": 2.112201963534362,
2653
+ "grad_norm": 8.840256456646525,
2654
+ "learning_rate": 5.749831708918905e-06,
2655
+ "loss": 1.5209,
2656
+ "step": 378
2657
+ },
2658
+ {
2659
+ "epoch": 2.11781206171108,
2660
+ "grad_norm": 9.645779423670444,
2661
+ "learning_rate": 5.705509254357962e-06,
2662
+ "loss": 1.4534,
2663
+ "step": 379
2664
+ },
2665
+ {
2666
+ "epoch": 2.123422159887798,
2667
+ "grad_norm": 19.518741348844134,
2668
+ "learning_rate": 5.6613822974358046e-06,
2669
+ "loss": 1.3217,
2670
+ "step": 380
2671
+ },
2672
+ {
2673
+ "epoch": 2.129032258064516,
2674
+ "grad_norm": 22.14350090057281,
2675
+ "learning_rate": 5.617452467528387e-06,
2676
+ "loss": 1.3701,
2677
+ "step": 381
2678
+ },
2679
+ {
2680
+ "epoch": 2.134642356241234,
2681
+ "grad_norm": 12.240003119839916,
2682
+ "learning_rate": 5.573721386732792e-06,
2683
+ "loss": 1.2617,
2684
+ "step": 382
2685
+ },
2686
+ {
2687
+ "epoch": 2.1402524544179524,
2688
+ "grad_norm": 9.94341073314739,
2689
+ "learning_rate": 5.530190669807354e-06,
2690
+ "loss": 1.433,
2691
+ "step": 383
2692
+ },
2693
+ {
2694
+ "epoch": 2.1458625525946706,
2695
+ "grad_norm": 19.958625594652514,
2696
+ "learning_rate": 5.486861924112021e-06,
2697
+ "loss": 1.1168,
2698
+ "step": 384
2699
+ },
2700
+ {
2701
+ "epoch": 2.1514726507713884,
2702
+ "grad_norm": 19.91297539332411,
2703
+ "learning_rate": 5.443736749549008e-06,
2704
+ "loss": 1.5081,
2705
+ "step": 385
2706
+ },
2707
+ {
2708
+ "epoch": 2.1570827489481066,
2709
+ "grad_norm": 9.46122164871927,
2710
+ "learning_rate": 5.400816738503725e-06,
2711
+ "loss": 1.6113,
2712
+ "step": 386
2713
+ },
2714
+ {
2715
+ "epoch": 2.162692847124825,
2716
+ "grad_norm": 15.132774640948087,
2717
+ "learning_rate": 5.358103475785963e-06,
2718
+ "loss": 1.7156,
2719
+ "step": 387
2720
+ },
2721
+ {
2722
+ "epoch": 2.1683029453015426,
2723
+ "grad_norm": 27.73349930634087,
2724
+ "learning_rate": 5.315598538571405e-06,
2725
+ "loss": 1.4951,
2726
+ "step": 388
2727
+ },
2728
+ {
2729
+ "epoch": 2.1739130434782608,
2730
+ "grad_norm": 21.02138889105744,
2731
+ "learning_rate": 5.273303496343356e-06,
2732
+ "loss": 1.4608,
2733
+ "step": 389
2734
+ },
2735
+ {
2736
+ "epoch": 2.179523141654979,
2737
+ "grad_norm": 15.21722666958038,
2738
+ "learning_rate": 5.231219910834808e-06,
2739
+ "loss": 1.3213,
2740
+ "step": 390
2741
+ },
2742
+ {
2743
+ "epoch": 2.185133239831697,
2744
+ "grad_norm": 17.053473708225102,
2745
+ "learning_rate": 5.189349335970779e-06,
2746
+ "loss": 1.6626,
2747
+ "step": 391
2748
+ },
2749
+ {
2750
+ "epoch": 2.1907433380084154,
2751
+ "grad_norm": 30.125323797473353,
2752
+ "learning_rate": 5.147693317810922e-06,
2753
+ "loss": 1.4193,
2754
+ "step": 392
2755
+ },
2756
+ {
2757
+ "epoch": 2.196353436185133,
2758
+ "grad_norm": 14.582213567512756,
2759
+ "learning_rate": 5.106253394492435e-06,
2760
+ "loss": 1.396,
2761
+ "step": 393
2762
+ },
2763
+ {
2764
+ "epoch": 2.2019635343618513,
2765
+ "grad_norm": 18.58071509244748,
2766
+ "learning_rate": 5.065031096173285e-06,
2767
+ "loss": 1.3126,
2768
+ "step": 394
2769
+ },
2770
+ {
2771
+ "epoch": 2.2075736325385695,
2772
+ "grad_norm": 18.323391904983588,
2773
+ "learning_rate": 5.024027944975682e-06,
2774
+ "loss": 1.5001,
2775
+ "step": 395
2776
+ },
2777
+ {
2778
+ "epoch": 2.2131837307152873,
2779
+ "grad_norm": 9.131786473934786,
2780
+ "learning_rate": 4.983245454929897e-06,
2781
+ "loss": 1.1489,
2782
+ "step": 396
2783
+ },
2784
+ {
2785
+ "epoch": 2.2187938288920055,
2786
+ "grad_norm": 19.531251954860753,
2787
+ "learning_rate": 4.942685131918347e-06,
2788
+ "loss": 1.7063,
2789
+ "step": 397
2790
+ },
2791
+ {
2792
+ "epoch": 2.2244039270687237,
2793
+ "grad_norm": 11.650215958091437,
2794
+ "learning_rate": 4.902348473619982e-06,
2795
+ "loss": 1.3877,
2796
+ "step": 398
2797
+ },
2798
+ {
2799
+ "epoch": 2.230014025245442,
2800
+ "grad_norm": 14.244474600713215,
2801
+ "learning_rate": 4.862236969455008e-06,
2802
+ "loss": 1.3306,
2803
+ "step": 399
2804
+ },
2805
+ {
2806
+ "epoch": 2.23562412342216,
2807
+ "grad_norm": 27.134645926912214,
2808
+ "learning_rate": 4.82235210052986e-06,
2809
+ "loss": 1.5161,
2810
+ "step": 400
2811
+ },
2812
+ {
2813
+ "epoch": 2.241234221598878,
2814
+ "grad_norm": 19.080120264645604,
2815
+ "learning_rate": 4.782695339582542e-06,
2816
+ "loss": 1.2893,
2817
+ "step": 401
2818
+ },
2819
+ {
2820
+ "epoch": 2.246844319775596,
2821
+ "grad_norm": 11.03852579482729,
2822
+ "learning_rate": 4.74326815092822e-06,
2823
+ "loss": 1.3512,
2824
+ "step": 402
2825
+ },
2826
+ {
2827
+ "epoch": 2.2524544179523143,
2828
+ "grad_norm": 8.02379081596676,
2829
+ "learning_rate": 4.704071990405177e-06,
2830
+ "loss": 1.2449,
2831
+ "step": 403
2832
+ },
2833
+ {
2834
+ "epoch": 2.258064516129032,
2835
+ "grad_norm": 9.947902369037147,
2836
+ "learning_rate": 4.6651083053210335e-06,
2837
+ "loss": 1.2081,
2838
+ "step": 404
2839
+ },
2840
+ {
2841
+ "epoch": 2.2636746143057502,
2842
+ "grad_norm": 18.939217068942586,
2843
+ "learning_rate": 4.6263785343993275e-06,
2844
+ "loss": 1.5017,
2845
+ "step": 405
2846
+ },
2847
+ {
2848
+ "epoch": 2.2692847124824684,
2849
+ "grad_norm": 49.48205350763517,
2850
+ "learning_rate": 4.587884107726371e-06,
2851
+ "loss": 1.5256,
2852
+ "step": 406
2853
+ },
2854
+ {
2855
+ "epoch": 2.2748948106591866,
2856
+ "grad_norm": 62.34571721147613,
2857
+ "learning_rate": 4.549626446698462e-06,
2858
+ "loss": 1.2649,
2859
+ "step": 407
2860
+ },
2861
+ {
2862
+ "epoch": 2.280504908835905,
2863
+ "grad_norm": 11.301553295405313,
2864
+ "learning_rate": 4.51160696396938e-06,
2865
+ "loss": 1.3876,
2866
+ "step": 408
2867
+ },
2868
+ {
2869
+ "epoch": 2.2861150070126226,
2870
+ "grad_norm": 14.545319112891631,
2871
+ "learning_rate": 4.473827063398246e-06,
2872
+ "loss": 1.6277,
2873
+ "step": 409
2874
+ },
2875
+ {
2876
+ "epoch": 2.291725105189341,
2877
+ "grad_norm": 12.111799331941523,
2878
+ "learning_rate": 4.436288139997673e-06,
2879
+ "loss": 1.4128,
2880
+ "step": 410
2881
+ },
2882
+ {
2883
+ "epoch": 2.297335203366059,
2884
+ "grad_norm": 25.16319364869307,
2885
+ "learning_rate": 4.398991579882247e-06,
2886
+ "loss": 1.3649,
2887
+ "step": 411
2888
+ },
2889
+ {
2890
+ "epoch": 2.3029453015427768,
2891
+ "grad_norm": 9.705872335231792,
2892
+ "learning_rate": 4.361938760217371e-06,
2893
+ "loss": 1.2412,
2894
+ "step": 412
2895
+ },
2896
+ {
2897
+ "epoch": 2.308555399719495,
2898
+ "grad_norm": 15.004750384511468,
2899
+ "learning_rate": 4.325131049168382e-06,
2900
+ "loss": 1.6177,
2901
+ "step": 413
2902
+ },
2903
+ {
2904
+ "epoch": 2.314165497896213,
2905
+ "grad_norm": 13.353110396095241,
2906
+ "learning_rate": 4.288569805850059e-06,
2907
+ "loss": 1.2242,
2908
+ "step": 414
2909
+ },
2910
+ {
2911
+ "epoch": 2.3197755960729314,
2912
+ "grad_norm": 10.438909781171024,
2913
+ "learning_rate": 4.2522563802764255e-06,
2914
+ "loss": 1.1428,
2915
+ "step": 415
2916
+ },
2917
+ {
2918
+ "epoch": 2.3253856942496496,
2919
+ "grad_norm": 12.320780473313446,
2920
+ "learning_rate": 4.216192113310895e-06,
2921
+ "loss": 1.2153,
2922
+ "step": 416
2923
+ },
2924
+ {
2925
+ "epoch": 2.3309957924263673,
2926
+ "grad_norm": 18.27797507745344,
2927
+ "learning_rate": 4.180378336616773e-06,
2928
+ "loss": 1.2527,
2929
+ "step": 417
2930
+ },
2931
+ {
2932
+ "epoch": 2.3366058906030855,
2933
+ "grad_norm": 12.936629262597277,
2934
+ "learning_rate": 4.144816372608073e-06,
2935
+ "loss": 1.3047,
2936
+ "step": 418
2937
+ },
2938
+ {
2939
+ "epoch": 2.3422159887798037,
2940
+ "grad_norm": 15.908852685108856,
2941
+ "learning_rate": 4.1095075344007014e-06,
2942
+ "loss": 1.5486,
2943
+ "step": 419
2944
+ },
2945
+ {
2946
+ "epoch": 2.3478260869565215,
2947
+ "grad_norm": 15.463005059149006,
2948
+ "learning_rate": 4.074453125763961e-06,
2949
+ "loss": 1.3726,
2950
+ "step": 420
2951
+ },
2952
+ {
2953
+ "epoch": 2.3534361851332397,
2954
+ "grad_norm": 14.294215716320576,
2955
+ "learning_rate": 4.039654441072405e-06,
2956
+ "loss": 1.3427,
2957
+ "step": 421
2958
+ },
2959
+ {
2960
+ "epoch": 2.359046283309958,
2961
+ "grad_norm": 15.768455718275225,
2962
+ "learning_rate": 4.005112765258057e-06,
2963
+ "loss": 1.3789,
2964
+ "step": 422
2965
+ },
2966
+ {
2967
+ "epoch": 2.364656381486676,
2968
+ "grad_norm": 10.424506077881269,
2969
+ "learning_rate": 3.970829373762954e-06,
2970
+ "loss": 1.3088,
2971
+ "step": 423
2972
+ },
2973
+ {
2974
+ "epoch": 2.3702664796633943,
2975
+ "grad_norm": 10.54267428276343,
2976
+ "learning_rate": 3.936805532492052e-06,
2977
+ "loss": 1.6141,
2978
+ "step": 424
2979
+ },
2980
+ {
2981
+ "epoch": 2.375876577840112,
2982
+ "grad_norm": 16.449513998300606,
2983
+ "learning_rate": 3.90304249776649e-06,
2984
+ "loss": 1.3617,
2985
+ "step": 425
2986
+ },
2987
+ {
2988
+ "epoch": 2.3814866760168303,
2989
+ "grad_norm": 13.957587681425652,
2990
+ "learning_rate": 3.869541516277191e-06,
2991
+ "loss": 1.4329,
2992
+ "step": 426
2993
+ },
2994
+ {
2995
+ "epoch": 2.3870967741935485,
2996
+ "grad_norm": 9.769958163690362,
2997
+ "learning_rate": 3.836303825038835e-06,
2998
+ "loss": 1.5039,
2999
+ "step": 427
3000
+ },
3001
+ {
3002
+ "epoch": 2.3927068723702662,
3003
+ "grad_norm": 16.573641265327524,
3004
+ "learning_rate": 3.8033306513441876e-06,
3005
+ "loss": 1.4685,
3006
+ "step": 428
3007
+ },
3008
+ {
3009
+ "epoch": 2.3983169705469845,
3010
+ "grad_norm": 61.04935510961862,
3011
+ "learning_rate": 3.770623212718765e-06,
3012
+ "loss": 1.5065,
3013
+ "step": 429
3014
+ },
3015
+ {
3016
+ "epoch": 2.4039270687237027,
3017
+ "grad_norm": 13.814139766936878,
3018
+ "learning_rate": 3.7381827168758867e-06,
3019
+ "loss": 1.708,
3020
+ "step": 430
3021
+ },
3022
+ {
3023
+ "epoch": 2.409537166900421,
3024
+ "grad_norm": 12.20707134653504,
3025
+ "learning_rate": 3.7060103616720973e-06,
3026
+ "loss": 1.4541,
3027
+ "step": 431
3028
+ },
3029
+ {
3030
+ "epoch": 2.415147265077139,
3031
+ "grad_norm": 10.831279423590178,
3032
+ "learning_rate": 3.674107335062903e-06,
3033
+ "loss": 1.3367,
3034
+ "step": 432
3035
+ },
3036
+ {
3037
+ "epoch": 2.420757363253857,
3038
+ "grad_norm": 15.17957774472088,
3039
+ "learning_rate": 3.6424748150589383e-06,
3040
+ "loss": 1.4451,
3041
+ "step": 433
3042
+ },
3043
+ {
3044
+ "epoch": 2.426367461430575,
3045
+ "grad_norm": 43.88550095171706,
3046
+ "learning_rate": 3.6111139696824445e-06,
3047
+ "loss": 1.4897,
3048
+ "step": 434
3049
+ },
3050
+ {
3051
+ "epoch": 2.4319775596072932,
3052
+ "grad_norm": 17.022424144766777,
3053
+ "learning_rate": 3.5800259569241597e-06,
3054
+ "loss": 1.4451,
3055
+ "step": 435
3056
+ },
3057
+ {
3058
+ "epoch": 2.437587657784011,
3059
+ "grad_norm": 13.121459950014572,
3060
+ "learning_rate": 3.5492119247005406e-06,
3061
+ "loss": 1.4099,
3062
+ "step": 436
3063
+ },
3064
+ {
3065
+ "epoch": 2.443197755960729,
3066
+ "grad_norm": 20.854465875412313,
3067
+ "learning_rate": 3.5186730108113966e-06,
3068
+ "loss": 1.3777,
3069
+ "step": 437
3070
+ },
3071
+ {
3072
+ "epoch": 2.4488078541374474,
3073
+ "grad_norm": 19.82240306986655,
3074
+ "learning_rate": 3.4884103428978655e-06,
3075
+ "loss": 1.3162,
3076
+ "step": 438
3077
+ },
3078
+ {
3079
+ "epoch": 2.4544179523141656,
3080
+ "grad_norm": 19.743675967797774,
3081
+ "learning_rate": 3.4584250384007687e-06,
3082
+ "loss": 1.3477,
3083
+ "step": 439
3084
+ },
3085
+ {
3086
+ "epoch": 2.460028050490884,
3087
+ "grad_norm": 31.40435038497109,
3088
+ "learning_rate": 3.428718204519369e-06,
3089
+ "loss": 1.2245,
3090
+ "step": 440
3091
+ },
3092
+ {
3093
+ "epoch": 2.4656381486676016,
3094
+ "grad_norm": 10.470367402241134,
3095
+ "learning_rate": 3.399290938170473e-06,
3096
+ "loss": 1.5034,
3097
+ "step": 441
3098
+ },
3099
+ {
3100
+ "epoch": 2.4712482468443198,
3101
+ "grad_norm": 10.92687941414526,
3102
+ "learning_rate": 3.3701443259479293e-06,
3103
+ "loss": 1.4631,
3104
+ "step": 442
3105
+ },
3106
+ {
3107
+ "epoch": 2.476858345021038,
3108
+ "grad_norm": 11.388896835005518,
3109
+ "learning_rate": 3.3412794440825138e-06,
3110
+ "loss": 1.4725,
3111
+ "step": 443
3112
+ },
3113
+ {
3114
+ "epoch": 2.4824684431977557,
3115
+ "grad_norm": 9.600043291161747,
3116
+ "learning_rate": 3.3126973584021793e-06,
3117
+ "loss": 1.3522,
3118
+ "step": 444
3119
+ },
3120
+ {
3121
+ "epoch": 2.488078541374474,
3122
+ "grad_norm": 14.275333506225046,
3123
+ "learning_rate": 3.2843991242927097e-06,
3124
+ "loss": 1.2974,
3125
+ "step": 445
3126
+ },
3127
+ {
3128
+ "epoch": 2.493688639551192,
3129
+ "grad_norm": 12.809793948684316,
3130
+ "learning_rate": 3.256385786658748e-06,
3131
+ "loss": 1.3043,
3132
+ "step": 446
3133
+ },
3134
+ {
3135
+ "epoch": 2.4992987377279103,
3136
+ "grad_norm": 13.618393566140282,
3137
+ "learning_rate": 3.2286583798852045e-06,
3138
+ "loss": 1.3538,
3139
+ "step": 447
3140
+ },
3141
+ {
3142
+ "epoch": 2.5049088359046285,
3143
+ "grad_norm": 33.32529639238415,
3144
+ "learning_rate": 3.2012179277990814e-06,
3145
+ "loss": 1.4108,
3146
+ "step": 448
3147
+ },
3148
+ {
3149
+ "epoch": 2.5105189340813463,
3150
+ "grad_norm": 14.302476484830468,
3151
+ "learning_rate": 3.1740654436316443e-06,
3152
+ "loss": 1.4814,
3153
+ "step": 449
3154
+ },
3155
+ {
3156
+ "epoch": 2.5161290322580645,
3157
+ "grad_norm": 16.872183410879636,
3158
+ "learning_rate": 3.1472019299810296e-06,
3159
+ "loss": 1.4011,
3160
+ "step": 450
3161
+ },
3162
+ {
3163
+ "epoch": 2.5217391304347827,
3164
+ "grad_norm": 8.486077404359225,
3165
+ "learning_rate": 3.120628378775216e-06,
3166
+ "loss": 1.4185,
3167
+ "step": 451
3168
+ },
3169
+ {
3170
+ "epoch": 2.5273492286115005,
3171
+ "grad_norm": 11.71800465321583,
3172
+ "learning_rate": 3.09434577123539e-06,
3173
+ "loss": 1.2849,
3174
+ "step": 452
3175
+ },
3176
+ {
3177
+ "epoch": 2.5329593267882187,
3178
+ "grad_norm": 14.853119779061982,
3179
+ "learning_rate": 3.06835507783973e-06,
3180
+ "loss": 1.5649,
3181
+ "step": 453
3182
+ },
3183
+ {
3184
+ "epoch": 2.538569424964937,
3185
+ "grad_norm": 65.58056847644397,
3186
+ "learning_rate": 3.0426572582875557e-06,
3187
+ "loss": 1.6189,
3188
+ "step": 454
3189
+ },
3190
+ {
3191
+ "epoch": 2.544179523141655,
3192
+ "grad_norm": 12.530368758694713,
3193
+ "learning_rate": 3.017253261463906e-06,
3194
+ "loss": 1.3411,
3195
+ "step": 455
3196
+ },
3197
+ {
3198
+ "epoch": 2.5497896213183733,
3199
+ "grad_norm": 11.055810688338571,
3200
+ "learning_rate": 2.992144025404495e-06,
3201
+ "loss": 1.5449,
3202
+ "step": 456
3203
+ },
3204
+ {
3205
+ "epoch": 2.555399719495091,
3206
+ "grad_norm": 27.908163066834966,
3207
+ "learning_rate": 2.9673304772610702e-06,
3208
+ "loss": 1.4863,
3209
+ "step": 457
3210
+ },
3211
+ {
3212
+ "epoch": 2.5610098176718092,
3213
+ "grad_norm": 19.88479296802079,
3214
+ "learning_rate": 2.942813533267185e-06,
3215
+ "loss": 1.5879,
3216
+ "step": 458
3217
+ },
3218
+ {
3219
+ "epoch": 2.5666199158485274,
3220
+ "grad_norm": 12.78986539705726,
3221
+ "learning_rate": 2.918594098704367e-06,
3222
+ "loss": 1.697,
3223
+ "step": 459
3224
+ },
3225
+ {
3226
+ "epoch": 2.572230014025245,
3227
+ "grad_norm": 10.969686431630194,
3228
+ "learning_rate": 2.894673067868685e-06,
3229
+ "loss": 1.338,
3230
+ "step": 460
3231
+ },
3232
+ {
3233
+ "epoch": 2.5778401122019634,
3234
+ "grad_norm": 24.827786234269603,
3235
+ "learning_rate": 2.871051324037735e-06,
3236
+ "loss": 1.5449,
3237
+ "step": 461
3238
+ },
3239
+ {
3240
+ "epoch": 2.5834502103786816,
3241
+ "grad_norm": 9.469236231927587,
3242
+ "learning_rate": 2.8477297394380167e-06,
3243
+ "loss": 1.5096,
3244
+ "step": 462
3245
+ },
3246
+ {
3247
+ "epoch": 2.5890603085554,
3248
+ "grad_norm": 17.452000273913672,
3249
+ "learning_rate": 2.824709175212734e-06,
3250
+ "loss": 1.5273,
3251
+ "step": 463
3252
+ },
3253
+ {
3254
+ "epoch": 2.594670406732118,
3255
+ "grad_norm": 13.256452994228898,
3256
+ "learning_rate": 2.8019904813899974e-06,
3257
+ "loss": 1.5446,
3258
+ "step": 464
3259
+ },
3260
+ {
3261
+ "epoch": 2.6002805049088358,
3262
+ "grad_norm": 23.868494536366445,
3263
+ "learning_rate": 2.7795744968514276e-06,
3264
+ "loss": 1.2896,
3265
+ "step": 465
3266
+ },
3267
+ {
3268
+ "epoch": 2.605890603085554,
3269
+ "grad_norm": 21.606197493757485,
3270
+ "learning_rate": 2.7574620493011945e-06,
3271
+ "loss": 1.348,
3272
+ "step": 466
3273
+ },
3274
+ {
3275
+ "epoch": 2.611500701262272,
3276
+ "grad_norm": 16.889481350139746,
3277
+ "learning_rate": 2.735653955235441e-06,
3278
+ "loss": 1.6912,
3279
+ "step": 467
3280
+ },
3281
+ {
3282
+ "epoch": 2.61711079943899,
3283
+ "grad_norm": 10.061444950954213,
3284
+ "learning_rate": 2.7141510199121402e-06,
3285
+ "loss": 1.4507,
3286
+ "step": 468
3287
+ },
3288
+ {
3289
+ "epoch": 2.622720897615708,
3290
+ "grad_norm": 22.843764698250858,
3291
+ "learning_rate": 2.6929540373213693e-06,
3292
+ "loss": 1.1088,
3293
+ "step": 469
3294
+ },
3295
+ {
3296
+ "epoch": 2.6283309957924264,
3297
+ "grad_norm": 57.54763815772751,
3298
+ "learning_rate": 2.672063790155971e-06,
3299
+ "loss": 1.3167,
3300
+ "step": 470
3301
+ },
3302
+ {
3303
+ "epoch": 2.6339410939691446,
3304
+ "grad_norm": 14.012079397996779,
3305
+ "learning_rate": 2.6514810497826755e-06,
3306
+ "loss": 1.4568,
3307
+ "step": 471
3308
+ },
3309
+ {
3310
+ "epoch": 2.6395511921458628,
3311
+ "grad_norm": 11.72337445092119,
3312
+ "learning_rate": 2.6312065762136023e-06,
3313
+ "loss": 1.3044,
3314
+ "step": 472
3315
+ },
3316
+ {
3317
+ "epoch": 2.6451612903225805,
3318
+ "grad_norm": 11.45913716212178,
3319
+ "learning_rate": 2.611241118078204e-06,
3320
+ "loss": 1.6543,
3321
+ "step": 473
3322
+ },
3323
+ {
3324
+ "epoch": 2.6507713884992987,
3325
+ "grad_norm": 34.40436182190075,
3326
+ "learning_rate": 2.5915854125956237e-06,
3327
+ "loss": 1.1738,
3328
+ "step": 474
3329
+ },
3330
+ {
3331
+ "epoch": 2.656381486676017,
3332
+ "grad_norm": 29.120537029967604,
3333
+ "learning_rate": 2.572240185547469e-06,
3334
+ "loss": 1.4869,
3335
+ "step": 475
3336
+ },
3337
+ {
3338
+ "epoch": 2.6619915848527347,
3339
+ "grad_norm": 29.563771869983317,
3340
+ "learning_rate": 2.553206151251016e-06,
3341
+ "loss": 1.3673,
3342
+ "step": 476
3343
+ },
3344
+ {
3345
+ "epoch": 2.667601683029453,
3346
+ "grad_norm": 11.031768588310115,
3347
+ "learning_rate": 2.534484012532836e-06,
3348
+ "loss": 1.4917,
3349
+ "step": 477
3350
+ },
3351
+ {
3352
+ "epoch": 2.673211781206171,
3353
+ "grad_norm": 31.01674108223117,
3354
+ "learning_rate": 2.516074460702832e-06,
3355
+ "loss": 1.5571,
3356
+ "step": 478
3357
+ },
3358
+ {
3359
+ "epoch": 2.6788218793828893,
3360
+ "grad_norm": 12.153681224765794,
3361
+ "learning_rate": 2.497978175528732e-06,
3362
+ "loss": 1.4064,
3363
+ "step": 479
3364
+ },
3365
+ {
3366
+ "epoch": 2.6844319775596075,
3367
+ "grad_norm": 16.113536815274934,
3368
+ "learning_rate": 2.480195825210967e-06,
3369
+ "loss": 1.4236,
3370
+ "step": 480
3371
+ },
3372
+ {
3373
+ "epoch": 2.6900420757363253,
3374
+ "grad_norm": 31.39801837106822,
3375
+ "learning_rate": 2.4627280663580147e-06,
3376
+ "loss": 1.46,
3377
+ "step": 481
3378
+ },
3379
+ {
3380
+ "epoch": 2.6956521739130435,
3381
+ "grad_norm": 11.928549532005288,
3382
+ "learning_rate": 2.445575543962148e-06,
3383
+ "loss": 1.293,
3384
+ "step": 482
3385
+ },
3386
+ {
3387
+ "epoch": 2.7012622720897617,
3388
+ "grad_norm": 12.85881510061664,
3389
+ "learning_rate": 2.428738891375614e-06,
3390
+ "loss": 1.2772,
3391
+ "step": 483
3392
+ },
3393
+ {
3394
+ "epoch": 2.7068723702664794,
3395
+ "grad_norm": 8.372011225533786,
3396
+ "learning_rate": 2.412218730287258e-06,
3397
+ "loss": 1.3799,
3398
+ "step": 484
3399
+ },
3400
+ {
3401
+ "epoch": 2.7124824684431976,
3402
+ "grad_norm": 11.043459774828861,
3403
+ "learning_rate": 2.3960156706995585e-06,
3404
+ "loss": 1.6409,
3405
+ "step": 485
3406
+ },
3407
+ {
3408
+ "epoch": 2.718092566619916,
3409
+ "grad_norm": 15.24112173517461,
3410
+ "learning_rate": 2.3801303109061096e-06,
3411
+ "loss": 1.4136,
3412
+ "step": 486
3413
+ },
3414
+ {
3415
+ "epoch": 2.723702664796634,
3416
+ "grad_norm": 16.22094507739665,
3417
+ "learning_rate": 2.364563237469525e-06,
3418
+ "loss": 1.3207,
3419
+ "step": 487
3420
+ },
3421
+ {
3422
+ "epoch": 2.7293127629733522,
3423
+ "grad_norm": 30.720161936633414,
3424
+ "learning_rate": 2.3493150251997785e-06,
3425
+ "loss": 1.3901,
3426
+ "step": 488
3427
+ },
3428
+ {
3429
+ "epoch": 2.73492286115007,
3430
+ "grad_norm": 11.60665902371763,
3431
+ "learning_rate": 2.334386237132988e-06,
3432
+ "loss": 1.4446,
3433
+ "step": 489
3434
+ },
3435
+ {
3436
+ "epoch": 2.740532959326788,
3437
+ "grad_norm": 12.321559833371005,
3438
+ "learning_rate": 2.3197774245106123e-06,
3439
+ "loss": 1.5776,
3440
+ "step": 490
3441
+ },
3442
+ {
3443
+ "epoch": 2.7461430575035064,
3444
+ "grad_norm": 18.09110828044268,
3445
+ "learning_rate": 2.3054891267591058e-06,
3446
+ "loss": 1.7661,
3447
+ "step": 491
3448
+ },
3449
+ {
3450
+ "epoch": 2.751753155680224,
3451
+ "grad_norm": 23.7097652014346,
3452
+ "learning_rate": 2.2915218714700015e-06,
3453
+ "loss": 1.395,
3454
+ "step": 492
3455
+ },
3456
+ {
3457
+ "epoch": 2.7573632538569424,
3458
+ "grad_norm": 19.104951625171648,
3459
+ "learning_rate": 2.2778761743804203e-06,
3460
+ "loss": 1.4431,
3461
+ "step": 493
3462
+ },
3463
+ {
3464
+ "epoch": 2.7629733520336606,
3465
+ "grad_norm": 11.734034381021537,
3466
+ "learning_rate": 2.2645525393540367e-06,
3467
+ "loss": 1.3761,
3468
+ "step": 494
3469
+ },
3470
+ {
3471
+ "epoch": 2.7685834502103788,
3472
+ "grad_norm": 16.148466253040784,
3473
+ "learning_rate": 2.2515514583624734e-06,
3474
+ "loss": 1.447,
3475
+ "step": 495
3476
+ },
3477
+ {
3478
+ "epoch": 2.774193548387097,
3479
+ "grad_norm": 8.3147371004889,
3480
+ "learning_rate": 2.238873411467126e-06,
3481
+ "loss": 1.5092,
3482
+ "step": 496
3483
+ },
3484
+ {
3485
+ "epoch": 2.7798036465638147,
3486
+ "grad_norm": 21.439520034906838,
3487
+ "learning_rate": 2.2265188668014505e-06,
3488
+ "loss": 1.6416,
3489
+ "step": 497
3490
+ },
3491
+ {
3492
+ "epoch": 2.785413744740533,
3493
+ "grad_norm": 15.340300048538328,
3494
+ "learning_rate": 2.214488280553663e-06,
3495
+ "loss": 1.4431,
3496
+ "step": 498
3497
+ },
3498
+ {
3499
+ "epoch": 2.791023842917251,
3500
+ "grad_norm": 23.64651049663467,
3501
+ "learning_rate": 2.2027820969499108e-06,
3502
+ "loss": 1.4033,
3503
+ "step": 499
3504
+ },
3505
+ {
3506
+ "epoch": 2.796633941093969,
3507
+ "grad_norm": 17.537330409566238,
3508
+ "learning_rate": 2.191400748237856e-06,
3509
+ "loss": 1.4545,
3510
+ "step": 500
3511
+ },
3512
+ {
3513
+ "epoch": 2.802244039270687,
3514
+ "grad_norm": 13.387827217231019,
3515
+ "learning_rate": 2.1803446546707198e-06,
3516
+ "loss": 1.4768,
3517
+ "step": 501
3518
+ },
3519
+ {
3520
+ "epoch": 2.8078541374474053,
3521
+ "grad_norm": 12.31938827830441,
3522
+ "learning_rate": 2.1696142244917686e-06,
3523
+ "loss": 1.3513,
3524
+ "step": 502
3525
+ },
3526
+ {
3527
+ "epoch": 2.8134642356241235,
3528
+ "grad_norm": 9.56317144380213,
3529
+ "learning_rate": 2.1592098539192337e-06,
3530
+ "loss": 1.4576,
3531
+ "step": 503
3532
+ },
3533
+ {
3534
+ "epoch": 2.8190743338008417,
3535
+ "grad_norm": 15.664022628440854,
3536
+ "learning_rate": 2.149131927131684e-06,
3537
+ "loss": 1.2717,
3538
+ "step": 504
3539
+ },
3540
+ {
3541
+ "epoch": 2.8246844319775595,
3542
+ "grad_norm": 10.973638788779944,
3543
+ "learning_rate": 2.1393808162538427e-06,
3544
+ "loss": 1.5818,
3545
+ "step": 505
3546
+ },
3547
+ {
3548
+ "epoch": 2.8302945301542777,
3549
+ "grad_norm": 11.696358945441519,
3550
+ "learning_rate": 2.129956881342839e-06,
3551
+ "loss": 1.5115,
3552
+ "step": 506
3553
+ },
3554
+ {
3555
+ "epoch": 2.835904628330996,
3556
+ "grad_norm": 35.59073283765529,
3557
+ "learning_rate": 2.120860470374923e-06,
3558
+ "loss": 1.4507,
3559
+ "step": 507
3560
+ },
3561
+ {
3562
+ "epoch": 2.8415147265077136,
3563
+ "grad_norm": 8.511008157750572,
3564
+ "learning_rate": 2.1120919192326068e-06,
3565
+ "loss": 1.6359,
3566
+ "step": 508
3567
+ },
3568
+ {
3569
+ "epoch": 2.847124824684432,
3570
+ "grad_norm": 14.507595686447782,
3571
+ "learning_rate": 2.1036515516922707e-06,
3572
+ "loss": 1.3308,
3573
+ "step": 509
3574
+ },
3575
+ {
3576
+ "epoch": 2.85273492286115,
3577
+ "grad_norm": 10.038060403672617,
3578
+ "learning_rate": 2.0955396794122045e-06,
3579
+ "loss": 1.538,
3580
+ "step": 510
3581
+ },
3582
+ {
3583
+ "epoch": 2.8583450210378682,
3584
+ "grad_norm": 38.19467872715205,
3585
+ "learning_rate": 2.0877566019210964e-06,
3586
+ "loss": 1.27,
3587
+ "step": 511
3588
+ },
3589
+ {
3590
+ "epoch": 2.8639551192145865,
3591
+ "grad_norm": 11.1405731929677,
3592
+ "learning_rate": 2.08030260660698e-06,
3593
+ "loss": 1.5597,
3594
+ "step": 512
3595
+ },
3596
+ {
3597
+ "epoch": 2.869565217391304,
3598
+ "grad_norm": 39.113003355372584,
3599
+ "learning_rate": 2.0731779687066127e-06,
3600
+ "loss": 1.4037,
3601
+ "step": 513
3602
+ },
3603
+ {
3604
+ "epoch": 2.8751753155680224,
3605
+ "grad_norm": 16.446670937193733,
3606
+ "learning_rate": 2.0663829512953256e-06,
3607
+ "loss": 1.782,
3608
+ "step": 514
3609
+ },
3610
+ {
3611
+ "epoch": 2.8807854137447406,
3612
+ "grad_norm": 13.566342003661653,
3613
+ "learning_rate": 2.0599178052772954e-06,
3614
+ "loss": 1.4863,
3615
+ "step": 515
3616
+ },
3617
+ {
3618
+ "epoch": 2.8863955119214584,
3619
+ "grad_norm": 9.581024556505865,
3620
+ "learning_rate": 2.0537827693762933e-06,
3621
+ "loss": 1.4205,
3622
+ "step": 516
3623
+ },
3624
+ {
3625
+ "epoch": 2.8920056100981766,
3626
+ "grad_norm": 12.300805161743316,
3627
+ "learning_rate": 2.047978070126859e-06,
3628
+ "loss": 1.1908,
3629
+ "step": 517
3630
+ },
3631
+ {
3632
+ "epoch": 2.897615708274895,
3633
+ "grad_norm": 9.130444624343745,
3634
+ "learning_rate": 2.042503921865942e-06,
3635
+ "loss": 1.4871,
3636
+ "step": 518
3637
+ },
3638
+ {
3639
+ "epoch": 2.903225806451613,
3640
+ "grad_norm": 12.067399220622741,
3641
+ "learning_rate": 2.0373605267249856e-06,
3642
+ "loss": 1.3093,
3643
+ "step": 519
3644
+ },
3645
+ {
3646
+ "epoch": 2.908835904628331,
3647
+ "grad_norm": 20.77624775965342,
3648
+ "learning_rate": 2.0325480746224673e-06,
3649
+ "loss": 1.4156,
3650
+ "step": 520
3651
+ },
3652
+ {
3653
+ "epoch": 2.914446002805049,
3654
+ "grad_norm": 21.80730408847714,
3655
+ "learning_rate": 2.028066743256877e-06,
3656
+ "loss": 1.4802,
3657
+ "step": 521
3658
+ },
3659
+ {
3660
+ "epoch": 2.920056100981767,
3661
+ "grad_norm": 14.511189470372491,
3662
+ "learning_rate": 2.0239166981001634e-06,
3663
+ "loss": 1.3718,
3664
+ "step": 522
3665
+ },
3666
+ {
3667
+ "epoch": 2.9256661991584854,
3668
+ "grad_norm": 24.388657913241754,
3669
+ "learning_rate": 2.020098092391622e-06,
3670
+ "loss": 1.3668,
3671
+ "step": 523
3672
+ },
3673
+ {
3674
+ "epoch": 2.931276297335203,
3675
+ "grad_norm": 17.0026639824568,
3676
+ "learning_rate": 2.0166110671322357e-06,
3677
+ "loss": 1.1583,
3678
+ "step": 524
3679
+ },
3680
+ {
3681
+ "epoch": 2.9368863955119213,
3682
+ "grad_norm": 13.89007224984465,
3683
+ "learning_rate": 2.013455751079469e-06,
3684
+ "loss": 1.6348,
3685
+ "step": 525
3686
+ },
3687
+ {
3688
+ "epoch": 2.9424964936886395,
3689
+ "grad_norm": 29.20020338138551,
3690
+ "learning_rate": 2.0106322607425133e-06,
3691
+ "loss": 1.4146,
3692
+ "step": 526
3693
+ },
3694
+ {
3695
+ "epoch": 2.9481065918653577,
3696
+ "grad_norm": 13.501005733452327,
3697
+ "learning_rate": 2.008140700377986e-06,
3698
+ "loss": 1.3433,
3699
+ "step": 527
3700
+ },
3701
+ {
3702
+ "epoch": 2.953716690042076,
3703
+ "grad_norm": 11.018447723777982,
3704
+ "learning_rate": 2.00598116198608e-06,
3705
+ "loss": 1.4188,
3706
+ "step": 528
3707
+ },
3708
+ {
3709
+ "epoch": 2.9593267882187937,
3710
+ "grad_norm": 11.852148777496266,
3711
+ "learning_rate": 2.0041537253071663e-06,
3712
+ "loss": 1.1538,
3713
+ "step": 529
3714
+ },
3715
+ {
3716
+ "epoch": 2.964936886395512,
3717
+ "grad_norm": 11.760188283123103,
3718
+ "learning_rate": 2.002658457818849e-06,
3719
+ "loss": 1.6892,
3720
+ "step": 530
3721
+ },
3722
+ {
3723
+ "epoch": 2.97054698457223,
3724
+ "grad_norm": 11.470050929329823,
3725
+ "learning_rate": 2.0014954147334775e-06,
3726
+ "loss": 1.5842,
3727
+ "step": 531
3728
+ },
3729
+ {
3730
+ "epoch": 2.976157082748948,
3731
+ "grad_norm": 29.651793877306915,
3732
+ "learning_rate": 2.000664638996101e-06,
3733
+ "loss": 1.2993,
3734
+ "step": 532
3735
+ },
3736
+ {
3737
+ "epoch": 2.981767180925666,
3738
+ "grad_norm": 10.304539896385103,
3739
+ "learning_rate": 2.0001661612828903e-06,
3740
+ "loss": 1.3348,
3741
+ "step": 533
3742
+ },
3743
+ {
3744
+ "epoch": 2.9873772791023843,
3745
+ "grad_norm": 12.263498025363374,
3746
+ "learning_rate": 2.0000000000000003e-06,
3747
+ "loss": 1.4921,
3748
+ "step": 534
3749
+ },
3750
+ {
3751
+ "epoch": 2.9873772791023843,
3752
+ "step": 534,
3753
+ "total_flos": 262773815541760.0,
3754
+ "train_loss": 1.490938222363647,
3755
+ "train_runtime": 43801.9866,
3756
+ "train_samples_per_second": 1.563,
3757
+ "train_steps_per_second": 0.012
3758
+ }
3759
+ ],
3760
+ "logging_steps": 1.0,
3761
+ "max_steps": 534,
3762
+ "num_input_tokens_seen": 0,
3763
+ "num_train_epochs": 3,
3764
+ "save_steps": 50,
3765
+ "stateful_callbacks": {
3766
+ "TrainerControl": {
3767
+ "args": {
3768
+ "should_epoch_stop": false,
3769
+ "should_evaluate": false,
3770
+ "should_log": false,
3771
+ "should_save": true,
3772
+ "should_training_stop": true
3773
+ },
3774
+ "attributes": {}
3775
+ }
3776
+ },
3777
+ "total_flos": 262773815541760.0,
3778
+ "train_batch_size": 4,
3779
+ "trial_name": null,
3780
+ "trial_params": null
3781
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42ccc01b6cc30f097b3d9e4434206dee3084f3d984c4357115a3f96767ec4a40
3
+ size 8120