{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9988545246277205,
  "eval_steps": 500,
  "global_step": 654,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0015273004963726614,
      "grad_norm": 5.274756775371289,
      "learning_rate": 1.5151515151515152e-07,
      "loss": 1.3724,
      "step": 1
    },
    {
      "epoch": 0.015273004963726614,
      "grad_norm": 2.4356823026963044,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 1.3215,
      "step": 10
    },
    {
      "epoch": 0.030546009927453228,
      "grad_norm": 0.8415959934340891,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.7041,
      "step": 20
    },
    {
      "epoch": 0.045819014891179836,
      "grad_norm": 0.3951472895674848,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.3621,
      "step": 30
    },
    {
      "epoch": 0.061092019854906456,
      "grad_norm": 0.3302170549897179,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.2555,
      "step": 40
    },
    {
      "epoch": 0.07636502481863307,
      "grad_norm": 0.3855993238810659,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.21,
      "step": 50
    },
    {
      "epoch": 0.09163802978235967,
      "grad_norm": 0.3643941392356843,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.1707,
      "step": 60
    },
    {
      "epoch": 0.10691103474608629,
      "grad_norm": 0.24356795945348528,
      "learning_rate": 9.99885820390154e-06,
      "loss": 0.1628,
      "step": 70
    },
    {
      "epoch": 0.12218403970981291,
      "grad_norm": 0.18282398542383,
      "learning_rate": 9.986018985905901e-06,
      "loss": 0.1537,
      "step": 80
    },
    {
      "epoch": 0.13745704467353953,
      "grad_norm": 0.17824704057472698,
      "learning_rate": 9.95895006911623e-06,
      "loss": 0.1472,
      "step": 90
    },
    {
      "epoch": 0.15273004963726614,
      "grad_norm": 0.20039388213383333,
      "learning_rate": 9.917728706052765e-06,
      "loss": 0.1512,
      "step": 100
    },
    {
      "epoch": 0.16800305460099274,
      "grad_norm": 0.21394666608149668,
      "learning_rate": 9.862472539183757e-06,
      "loss": 0.1446,
      "step": 110
    },
    {
      "epoch": 0.18327605956471935,
      "grad_norm": 0.1399694568156125,
      "learning_rate": 9.793339265183303e-06,
      "loss": 0.1379,
      "step": 120
    },
    {
      "epoch": 0.19854906452844598,
      "grad_norm": 0.17756723955927178,
      "learning_rate": 9.710526184877667e-06,
      "loss": 0.1446,
      "step": 130
    },
    {
      "epoch": 0.21382206949217258,
      "grad_norm": 0.1256183224074801,
      "learning_rate": 9.61426964016452e-06,
      "loss": 0.1598,
      "step": 140
    },
    {
      "epoch": 0.2290950744558992,
      "grad_norm": 0.1925308832459223,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.1703,
      "step": 150
    },
    {
      "epoch": 0.24436807941962582,
      "grad_norm": 0.18425814277453983,
      "learning_rate": 9.382562573963238e-06,
      "loss": 0.1347,
      "step": 160
    },
    {
      "epoch": 0.2596410843833524,
      "grad_norm": 0.16364274896349304,
      "learning_rate": 9.24777332588177e-06,
      "loss": 0.1291,
      "step": 170
    },
    {
      "epoch": 0.27491408934707906,
      "grad_norm": 0.1575283767974318,
      "learning_rate": 9.10086127298478e-06,
      "loss": 0.1455,
      "step": 180
    },
    {
      "epoch": 0.29018709431080564,
      "grad_norm": 0.16299547711079815,
      "learning_rate": 8.94224569050324e-06,
      "loss": 0.1448,
      "step": 190
    },
    {
      "epoch": 0.30546009927453227,
      "grad_norm": 0.1678304349758231,
      "learning_rate": 8.772379254604074e-06,
      "loss": 0.1329,
      "step": 200
    },
    {
      "epoch": 0.3207331042382589,
      "grad_norm": 0.18133792526251943,
      "learning_rate": 8.591746750488639e-06,
      "loss": 0.1523,
      "step": 210
    },
    {
      "epoch": 0.3360061092019855,
      "grad_norm": 0.138988177087972,
      "learning_rate": 8.400863688854598e-06,
      "loss": 0.1487,
      "step": 220
    },
    {
      "epoch": 0.3512791141657121,
      "grad_norm": 0.10975239156978413,
      "learning_rate": 8.200274834669675e-06,
      "loss": 0.1454,
      "step": 230
    },
    {
      "epoch": 0.3665521191294387,
      "grad_norm": 0.13022143377921225,
      "learning_rate": 7.99055265245608e-06,
      "loss": 0.1414,
      "step": 240
    },
    {
      "epoch": 0.3818251240931653,
      "grad_norm": 0.09325598021130543,
      "learning_rate": 7.772295672522615e-06,
      "loss": 0.1406,
      "step": 250
    },
    {
      "epoch": 0.39709812905689196,
      "grad_norm": 0.10192417440664955,
      "learning_rate": 7.546126782807117e-06,
      "loss": 0.1243,
      "step": 260
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 0.10214456159318729,
      "learning_rate": 7.312691451204178e-06,
      "loss": 0.1381,
      "step": 270
    },
    {
      "epoch": 0.42764413898434517,
      "grad_norm": 0.11650402115334278,
      "learning_rate": 7.072655883451478e-06,
      "loss": 0.1155,
      "step": 280
    },
    {
      "epoch": 0.4429171439480718,
      "grad_norm": 0.09899147898887324,
      "learning_rate": 6.8267051218319766e-06,
      "loss": 0.1372,
      "step": 290
    },
    {
      "epoch": 0.4581901489117984,
      "grad_norm": 0.11909991495605356,
      "learning_rate": 6.575541090118105e-06,
      "loss": 0.1449,
      "step": 300
    },
    {
      "epoch": 0.473463153875525,
      "grad_norm": 0.11324258672549159,
      "learning_rate": 6.319880590337549e-06,
      "loss": 0.1355,
      "step": 310
    },
    {
      "epoch": 0.48873615883925164,
      "grad_norm": 0.15684721556128514,
      "learning_rate": 6.060453257077686e-06,
      "loss": 0.1448,
      "step": 320
    },
    {
      "epoch": 0.5040091638029782,
      "grad_norm": 0.13428699506671887,
      "learning_rate": 5.797999475166897e-06,
      "loss": 0.161,
      "step": 330
    },
    {
      "epoch": 0.5192821687667049,
      "grad_norm": 0.16260953903247685,
      "learning_rate": 5.533268266675601e-06,
      "loss": 0.1301,
      "step": 340
    },
    {
      "epoch": 0.5345551737304315,
      "grad_norm": 0.138117775450493,
      "learning_rate": 5.267015153267246e-06,
      "loss": 0.1153,
      "step": 350
    },
    {
      "epoch": 0.5498281786941581,
      "grad_norm": 0.09342193422842268,
      "learning_rate": 5e-06,
      "loss": 0.1388,
      "step": 360
    },
    {
      "epoch": 0.5651011836578846,
      "grad_norm": 0.12521354455054481,
      "learning_rate": 4.732984846732755e-06,
      "loss": 0.1469,
      "step": 370
    },
    {
      "epoch": 0.5803741886216113,
      "grad_norm": 0.12074643796303608,
      "learning_rate": 4.466731733324399e-06,
      "loss": 0.1336,
      "step": 380
    },
    {
      "epoch": 0.5956471935853379,
      "grad_norm": 0.10811612216811932,
      "learning_rate": 4.2020005248331056e-06,
      "loss": 0.1444,
      "step": 390
    },
    {
      "epoch": 0.6109201985490645,
      "grad_norm": 0.07932149519427452,
      "learning_rate": 3.939546742922318e-06,
      "loss": 0.1217,
      "step": 400
    },
    {
      "epoch": 0.6261932035127912,
      "grad_norm": 0.13495076484320434,
      "learning_rate": 3.6801194096624515e-06,
      "loss": 0.1486,
      "step": 410
    },
    {
      "epoch": 0.6414662084765178,
      "grad_norm": 0.09577670394462665,
      "learning_rate": 3.424458909881897e-06,
      "loss": 0.1351,
      "step": 420
    },
    {
      "epoch": 0.6567392134402443,
      "grad_norm": 0.13052717933517727,
      "learning_rate": 3.173294878168025e-06,
      "loss": 0.1115,
      "step": 430
    },
    {
      "epoch": 0.672012218403971,
      "grad_norm": 0.10868560247173807,
      "learning_rate": 2.9273441165485227e-06,
      "loss": 0.1429,
      "step": 440
    },
    {
      "epoch": 0.6872852233676976,
      "grad_norm": 0.08492718279444092,
      "learning_rate": 2.687308548795825e-06,
      "loss": 0.1006,
      "step": 450
    },
    {
      "epoch": 0.7025582283314242,
      "grad_norm": 0.11879878396233616,
      "learning_rate": 2.4538732171928847e-06,
      "loss": 0.1441,
      "step": 460
    },
    {
      "epoch": 0.7178312332951509,
      "grad_norm": 0.08627366089386783,
      "learning_rate": 2.2277043274773856e-06,
      "loss": 0.121,
      "step": 470
    },
    {
      "epoch": 0.7331042382588774,
      "grad_norm": 0.11860493468588708,
      "learning_rate": 2.00944734754392e-06,
      "loss": 0.104,
      "step": 480
    },
    {
      "epoch": 0.748377243222604,
      "grad_norm": 0.13566719962532361,
      "learning_rate": 1.7997251653303249e-06,
      "loss": 0.1223,
      "step": 490
    },
    {
      "epoch": 0.7636502481863306,
      "grad_norm": 0.11713757986892694,
      "learning_rate": 1.5991363111454023e-06,
      "loss": 0.1297,
      "step": 500
    },
    {
      "epoch": 0.7789232531500573,
      "grad_norm": 0.0932474027124396,
      "learning_rate": 1.4082532495113627e-06,
      "loss": 0.129,
      "step": 510
    },
    {
      "epoch": 0.7941962581137839,
      "grad_norm": 0.06960734581371894,
      "learning_rate": 1.2276207453959283e-06,
      "loss": 0.102,
      "step": 520
    },
    {
      "epoch": 0.8094692630775105,
      "grad_norm": 0.1054188314922148,
      "learning_rate": 1.0577543094967613e-06,
      "loss": 0.1182,
      "step": 530
    },
    {
      "epoch": 0.8247422680412371,
      "grad_norm": 0.15433498716395788,
      "learning_rate": 8.991387270152202e-07,
      "loss": 0.1306,
      "step": 540
    },
    {
      "epoch": 0.8400152730049637,
      "grad_norm": 0.10854241303868371,
      "learning_rate": 7.522266741182305e-07,
      "loss": 0.11,
      "step": 550
    },
    {
      "epoch": 0.8552882779686903,
      "grad_norm": 0.13631634648737045,
      "learning_rate": 6.174374260367611e-07,
      "loss": 0.1162,
      "step": 560
    },
    {
      "epoch": 0.870561282932417,
      "grad_norm": 0.11447961702982919,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.1396,
      "step": 570
    },
    {
      "epoch": 0.8858342878961436,
      "grad_norm": 0.1527666931393435,
      "learning_rate": 3.8573035983548167e-07,
      "loss": 0.1353,
      "step": 580
    },
    {
      "epoch": 0.9011072928598702,
      "grad_norm": 0.13839722389025982,
      "learning_rate": 2.894738151223331e-07,
      "loss": 0.1193,
      "step": 590
    },
    {
      "epoch": 0.9163802978235968,
      "grad_norm": 0.13196785663691574,
      "learning_rate": 2.0666073481669714e-07,
      "loss": 0.1147,
      "step": 600
    },
    {
      "epoch": 0.9316533027873234,
      "grad_norm": 0.09878735062205103,
      "learning_rate": 1.375274608162447e-07,
      "loss": 0.1261,
      "step": 610
    },
    {
      "epoch": 0.94692630775105,
      "grad_norm": 0.07952980044315368,
      "learning_rate": 8.227129394723643e-08,
      "loss": 0.0992,
      "step": 620
    },
    {
      "epoch": 0.9621993127147767,
      "grad_norm": 0.09015417303190752,
      "learning_rate": 4.104993088376974e-08,
      "loss": 0.1212,
      "step": 630
    },
    {
      "epoch": 0.9774723176785033,
      "grad_norm": 0.12899056246523352,
      "learning_rate": 1.3981014094099354e-08,
      "loss": 0.131,
      "step": 640
    },
    {
      "epoch": 0.9927453226422298,
      "grad_norm": 0.08443201140015646,
      "learning_rate": 1.1417960984605459e-09,
      "loss": 0.1302,
      "step": 650
    }
  ],
  "logging_steps": 10,
  "max_steps": 654,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 32812144386048.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}