|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9976711690731253, |
|
"eval_steps": 500, |
|
"global_step": 3219, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004657661853749418, |
|
"grad_norm": 0.3246580015338979, |
|
"learning_rate": 7.763975155279503e-07, |
|
"loss": 0.4054, |
|
"num_tokens": 2621440.0, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009315323707498836, |
|
"grad_norm": 0.30502580431160947, |
|
"learning_rate": 1.5527950310559006e-06, |
|
"loss": 0.4067, |
|
"num_tokens": 5242880.0, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013972985561248253, |
|
"grad_norm": 0.24294102073505805, |
|
"learning_rate": 2.329192546583851e-06, |
|
"loss": 0.3997, |
|
"num_tokens": 7845719.0, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.018630647414997672, |
|
"grad_norm": 0.29669820095143357, |
|
"learning_rate": 3.1055900621118013e-06, |
|
"loss": 0.4053, |
|
"num_tokens": 10457626.0, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02328830926874709, |
|
"grad_norm": 0.27363318055880104, |
|
"learning_rate": 3.881987577639752e-06, |
|
"loss": 0.4005, |
|
"num_tokens": 13067414.0, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.027945971122496506, |
|
"grad_norm": 0.2906913628187444, |
|
"learning_rate": 4.658385093167702e-06, |
|
"loss": 0.4053, |
|
"num_tokens": 15688854.0, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.032603632976245925, |
|
"grad_norm": 0.26831415568112743, |
|
"learning_rate": 5.4347826086956525e-06, |
|
"loss": 0.4097, |
|
"num_tokens": 18278844.0, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.037261294829995344, |
|
"grad_norm": 0.24707490474424423, |
|
"learning_rate": 6.2111801242236025e-06, |
|
"loss": 0.4125, |
|
"num_tokens": 20900161.0, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04191895668374476, |
|
"grad_norm": 0.25285098310366294, |
|
"learning_rate": 6.9875776397515525e-06, |
|
"loss": 0.3876, |
|
"num_tokens": 23521601.0, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04657661853749418, |
|
"grad_norm": 0.25343162367275623, |
|
"learning_rate": 7.763975155279503e-06, |
|
"loss": 0.3943, |
|
"num_tokens": 26143041.0, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05123428039124359, |
|
"grad_norm": 0.23791425487205492, |
|
"learning_rate": 8.540372670807453e-06, |
|
"loss": 0.392, |
|
"num_tokens": 28764481.0, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05589194224499301, |
|
"grad_norm": 0.22651488429822728, |
|
"learning_rate": 9.316770186335403e-06, |
|
"loss": 0.3917, |
|
"num_tokens": 31385921.0, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06054960409874243, |
|
"grad_norm": 0.25448337996378584, |
|
"learning_rate": 1.0093167701863353e-05, |
|
"loss": 0.3882, |
|
"num_tokens": 33990177.0, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06520726595249185, |
|
"grad_norm": 0.22131771878749865, |
|
"learning_rate": 1.0869565217391305e-05, |
|
"loss": 0.3744, |
|
"num_tokens": 36611617.0, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06986492780624126, |
|
"grad_norm": 0.2669486917080392, |
|
"learning_rate": 1.1645962732919255e-05, |
|
"loss": 0.3782, |
|
"num_tokens": 39218907.0, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07452258965999069, |
|
"grad_norm": 0.25872212177529486, |
|
"learning_rate": 1.2422360248447205e-05, |
|
"loss": 0.3903, |
|
"num_tokens": 41840347.0, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0791802515137401, |
|
"grad_norm": 0.22749950020081922, |
|
"learning_rate": 1.3198757763975155e-05, |
|
"loss": 0.3722, |
|
"num_tokens": 44461787.0, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08383791336748952, |
|
"grad_norm": 0.2850975365464826, |
|
"learning_rate": 1.3975155279503105e-05, |
|
"loss": 0.3785, |
|
"num_tokens": 47083227.0, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08849557522123894, |
|
"grad_norm": 0.24567493862103307, |
|
"learning_rate": 1.4751552795031057e-05, |
|
"loss": 0.3817, |
|
"num_tokens": 49704667.0, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09315323707498836, |
|
"grad_norm": 0.25985197139962063, |
|
"learning_rate": 1.5527950310559007e-05, |
|
"loss": 0.3765, |
|
"num_tokens": 52326107.0, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09781089892873777, |
|
"grad_norm": 0.2910146307916693, |
|
"learning_rate": 1.630434782608696e-05, |
|
"loss": 0.3756, |
|
"num_tokens": 54947547.0, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10246856078248719, |
|
"grad_norm": 0.2775046213474384, |
|
"learning_rate": 1.7080745341614907e-05, |
|
"loss": 0.3697, |
|
"num_tokens": 57568987.0, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.10712622263623661, |
|
"grad_norm": 0.348005475178152, |
|
"learning_rate": 1.785714285714286e-05, |
|
"loss": 0.3687, |
|
"num_tokens": 60190427.0, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11178388448998602, |
|
"grad_norm": 0.29715452279773225, |
|
"learning_rate": 1.8633540372670807e-05, |
|
"loss": 0.3786, |
|
"num_tokens": 62811867.0, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11644154634373545, |
|
"grad_norm": 0.30874357889032844, |
|
"learning_rate": 1.940993788819876e-05, |
|
"loss": 0.3612, |
|
"num_tokens": 65433307.0, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.12109920819748486, |
|
"grad_norm": 0.31748202342003423, |
|
"learning_rate": 2.0186335403726707e-05, |
|
"loss": 0.3611, |
|
"num_tokens": 68034228.0, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1257568700512343, |
|
"grad_norm": 0.33434897463305013, |
|
"learning_rate": 2.096273291925466e-05, |
|
"loss": 0.3683, |
|
"num_tokens": 70655668.0, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.1304145319049837, |
|
"grad_norm": 0.30833968968841147, |
|
"learning_rate": 2.173913043478261e-05, |
|
"loss": 0.3613, |
|
"num_tokens": 73277108.0, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1350721937587331, |
|
"grad_norm": 0.3231346578790754, |
|
"learning_rate": 2.2515527950310562e-05, |
|
"loss": 0.3693, |
|
"num_tokens": 75898548.0, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13972985561248252, |
|
"grad_norm": 0.3529022033943459, |
|
"learning_rate": 2.329192546583851e-05, |
|
"loss": 0.3626, |
|
"num_tokens": 78519988.0, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14438751746623196, |
|
"grad_norm": 0.3371933430263348, |
|
"learning_rate": 2.4068322981366462e-05, |
|
"loss": 0.3495, |
|
"num_tokens": 81141428.0, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14904517931998137, |
|
"grad_norm": 0.34848324489455, |
|
"learning_rate": 2.484472049689441e-05, |
|
"loss": 0.3531, |
|
"num_tokens": 83733147.0, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1537028411737308, |
|
"grad_norm": 0.383127789784734, |
|
"learning_rate": 2.5621118012422362e-05, |
|
"loss": 0.3525, |
|
"num_tokens": 86351565.0, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.1583605030274802, |
|
"grad_norm": 0.348878572043702, |
|
"learning_rate": 2.639751552795031e-05, |
|
"loss": 0.3512, |
|
"num_tokens": 88973005.0, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1630181648812296, |
|
"grad_norm": 0.40565527852072275, |
|
"learning_rate": 2.7173913043478262e-05, |
|
"loss": 0.3595, |
|
"num_tokens": 91572965.0, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16767582673497905, |
|
"grad_norm": 0.29809752183694505, |
|
"learning_rate": 2.795031055900621e-05, |
|
"loss": 0.3521, |
|
"num_tokens": 94194405.0, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.17233348858872846, |
|
"grad_norm": 0.46897573673807097, |
|
"learning_rate": 2.8726708074534165e-05, |
|
"loss": 0.3522, |
|
"num_tokens": 96814074.0, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"grad_norm": 0.3700763516693258, |
|
"learning_rate": 2.9503105590062114e-05, |
|
"loss": 0.3436, |
|
"num_tokens": 99435514.0, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.18164881229622729, |
|
"grad_norm": 0.36250511200009977, |
|
"learning_rate": 3.0279503105590062e-05, |
|
"loss": 0.3487, |
|
"num_tokens": 102056954.0, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.18630647414997673, |
|
"grad_norm": 0.4535239213643802, |
|
"learning_rate": 3.1055900621118014e-05, |
|
"loss": 0.326, |
|
"num_tokens": 104633209.0, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.19096413600372614, |
|
"grad_norm": 0.4326434738607519, |
|
"learning_rate": 3.183229813664597e-05, |
|
"loss": 0.3402, |
|
"num_tokens": 107177869.0, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.19562179785747555, |
|
"grad_norm": 0.4383076498405631, |
|
"learning_rate": 3.260869565217392e-05, |
|
"loss": 0.3316, |
|
"num_tokens": 109799309.0, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.20027945971122496, |
|
"grad_norm": 0.5145313193864417, |
|
"learning_rate": 3.3385093167701865e-05, |
|
"loss": 0.338, |
|
"num_tokens": 112389962.0, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.20493712156497437, |
|
"grad_norm": 0.3674584315687584, |
|
"learning_rate": 3.4161490683229814e-05, |
|
"loss": 0.336, |
|
"num_tokens": 115000610.0, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2095947834187238, |
|
"grad_norm": 0.46762626808542057, |
|
"learning_rate": 3.493788819875777e-05, |
|
"loss": 0.3363, |
|
"num_tokens": 117622050.0, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.21425244527247322, |
|
"grad_norm": 0.378012859125403, |
|
"learning_rate": 3.571428571428572e-05, |
|
"loss": 0.3333, |
|
"num_tokens": 120229718.0, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.21891010712622264, |
|
"grad_norm": 0.4281393022990085, |
|
"learning_rate": 3.6490683229813665e-05, |
|
"loss": 0.3266, |
|
"num_tokens": 122851158.0, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.22356776897997205, |
|
"grad_norm": 0.570048871418008, |
|
"learning_rate": 3.7267080745341614e-05, |
|
"loss": 0.3287, |
|
"num_tokens": 125472598.0, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.22822543083372146, |
|
"grad_norm": 0.5343887029456514, |
|
"learning_rate": 3.804347826086957e-05, |
|
"loss": 0.3156, |
|
"num_tokens": 128065220.0, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2328830926874709, |
|
"grad_norm": 0.4625893787751897, |
|
"learning_rate": 3.881987577639752e-05, |
|
"loss": 0.3156, |
|
"num_tokens": 130686660.0, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.2375407545412203, |
|
"grad_norm": 0.43218634350538265, |
|
"learning_rate": 3.9596273291925465e-05, |
|
"loss": 0.3204, |
|
"num_tokens": 133297528.0, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.24219841639496972, |
|
"grad_norm": 0.4186876218844114, |
|
"learning_rate": 4.0372670807453414e-05, |
|
"loss": 0.3219, |
|
"num_tokens": 135918968.0, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.24685607824871914, |
|
"grad_norm": 0.5702442122844179, |
|
"learning_rate": 4.114906832298137e-05, |
|
"loss": 0.3184, |
|
"num_tokens": 138540408.0, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2515137401024686, |
|
"grad_norm": 0.4360821769531783, |
|
"learning_rate": 4.192546583850932e-05, |
|
"loss": 0.3091, |
|
"num_tokens": 141161848.0, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.25617140195621796, |
|
"grad_norm": 0.4520415095190347, |
|
"learning_rate": 4.270186335403727e-05, |
|
"loss": 0.3189, |
|
"num_tokens": 143774319.0, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2608290638099674, |
|
"grad_norm": 0.4867584283827858, |
|
"learning_rate": 4.347826086956522e-05, |
|
"loss": 0.3072, |
|
"num_tokens": 146395759.0, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.26548672566371684, |
|
"grad_norm": 0.531950209741864, |
|
"learning_rate": 4.425465838509317e-05, |
|
"loss": 0.3098, |
|
"num_tokens": 149017199.0, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2701443875174662, |
|
"grad_norm": 0.4196291051314364, |
|
"learning_rate": 4.5031055900621124e-05, |
|
"loss": 0.3052, |
|
"num_tokens": 151638639.0, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.27480204937121566, |
|
"grad_norm": 0.6527165477900265, |
|
"learning_rate": 4.580745341614907e-05, |
|
"loss": 0.3016, |
|
"num_tokens": 154260079.0, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.27945971122496505, |
|
"grad_norm": 0.5379025127643137, |
|
"learning_rate": 4.658385093167702e-05, |
|
"loss": 0.3068, |
|
"num_tokens": 156881519.0, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2841173730787145, |
|
"grad_norm": 0.6496049826211475, |
|
"learning_rate": 4.736024844720497e-05, |
|
"loss": 0.3088, |
|
"num_tokens": 159470618.0, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.2887750349324639, |
|
"grad_norm": 0.46608959595914534, |
|
"learning_rate": 4.8136645962732924e-05, |
|
"loss": 0.3043, |
|
"num_tokens": 162092058.0, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2934326967862133, |
|
"grad_norm": 0.4688858785603408, |
|
"learning_rate": 4.891304347826087e-05, |
|
"loss": 0.3025, |
|
"num_tokens": 164713498.0, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.29809035863996275, |
|
"grad_norm": 0.5272676153698671, |
|
"learning_rate": 4.968944099378882e-05, |
|
"loss": 0.3059, |
|
"num_tokens": 167334938.0, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.30274802049371213, |
|
"grad_norm": 0.630120084552457, |
|
"learning_rate": 4.994822229892993e-05, |
|
"loss": 0.3079, |
|
"num_tokens": 169956378.0, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.3074056823474616, |
|
"grad_norm": 0.8807680093317907, |
|
"learning_rate": 4.986192613047981e-05, |
|
"loss": 0.3053, |
|
"num_tokens": 172577818.0, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.312063344201211, |
|
"grad_norm": 0.4762589007693949, |
|
"learning_rate": 4.977562996202969e-05, |
|
"loss": 0.3024, |
|
"num_tokens": 175199258.0, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.3167210060549604, |
|
"grad_norm": 1.4269007218795318, |
|
"learning_rate": 4.968933379357957e-05, |
|
"loss": 0.3232, |
|
"num_tokens": 177820698.0, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.32137866790870984, |
|
"grad_norm": 0.45352771708953626, |
|
"learning_rate": 4.9603037625129445e-05, |
|
"loss": 0.3053, |
|
"num_tokens": 180442138.0, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.3260363297624592, |
|
"grad_norm": 0.5285022339179731, |
|
"learning_rate": 4.951674145667933e-05, |
|
"loss": 0.3036, |
|
"num_tokens": 183063578.0, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.33069399161620866, |
|
"grad_norm": 0.5169751746621581, |
|
"learning_rate": 4.94304452882292e-05, |
|
"loss": 0.2974, |
|
"num_tokens": 185685018.0, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3353516534699581, |
|
"grad_norm": 0.548293496106218, |
|
"learning_rate": 4.934414911977908e-05, |
|
"loss": 0.3039, |
|
"num_tokens": 188300024.0, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3400093153237075, |
|
"grad_norm": 0.4295101153028381, |
|
"learning_rate": 4.9257852951328965e-05, |
|
"loss": 0.2986, |
|
"num_tokens": 190921464.0, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3446669771774569, |
|
"grad_norm": 0.417345362400268, |
|
"learning_rate": 4.917155678287884e-05, |
|
"loss": 0.3092, |
|
"num_tokens": 193536988.0, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3493246390312063, |
|
"grad_norm": 0.5131381678527972, |
|
"learning_rate": 4.908526061442872e-05, |
|
"loss": 0.3034, |
|
"num_tokens": 196158428.0, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.35398230088495575, |
|
"grad_norm": 0.5052096528443951, |
|
"learning_rate": 4.89989644459786e-05, |
|
"loss": 0.2947, |
|
"num_tokens": 198779868.0, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3586399627387052, |
|
"grad_norm": 0.4356730038516669, |
|
"learning_rate": 4.891266827752848e-05, |
|
"loss": 0.2912, |
|
"num_tokens": 201401308.0, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.36329762459245457, |
|
"grad_norm": 0.5253395031021396, |
|
"learning_rate": 4.882637210907836e-05, |
|
"loss": 0.2944, |
|
"num_tokens": 204007207.0, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.367955286446204, |
|
"grad_norm": 0.5029033782379033, |
|
"learning_rate": 4.874007594062824e-05, |
|
"loss": 0.291, |
|
"num_tokens": 206628647.0, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.37261294829995345, |
|
"grad_norm": 0.4270568679976557, |
|
"learning_rate": 4.865377977217811e-05, |
|
"loss": 0.295, |
|
"num_tokens": 209250087.0, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37727061015370283, |
|
"grad_norm": 0.3464954396580666, |
|
"learning_rate": 4.8567483603728e-05, |
|
"loss": 0.2928, |
|
"num_tokens": 211846391.0, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3819282720074523, |
|
"grad_norm": 0.4624178520334951, |
|
"learning_rate": 4.8481187435277875e-05, |
|
"loss": 0.2931, |
|
"num_tokens": 214422531.0, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.38658593386120166, |
|
"grad_norm": 0.44827248521714486, |
|
"learning_rate": 4.839489126682776e-05, |
|
"loss": 0.2947, |
|
"num_tokens": 217043971.0, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3912435957149511, |
|
"grad_norm": 0.461241958776309, |
|
"learning_rate": 4.830859509837763e-05, |
|
"loss": 0.2871, |
|
"num_tokens": 219639039.0, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.39590125756870054, |
|
"grad_norm": 0.4360760147983306, |
|
"learning_rate": 4.822229892992751e-05, |
|
"loss": 0.3001, |
|
"num_tokens": 222260479.0, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.4005589194224499, |
|
"grad_norm": 0.46420621837516945, |
|
"learning_rate": 4.8136002761477395e-05, |
|
"loss": 0.2973, |
|
"num_tokens": 224881919.0, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.40521658127619936, |
|
"grad_norm": 0.3733620380961576, |
|
"learning_rate": 4.804970659302727e-05, |
|
"loss": 0.2948, |
|
"num_tokens": 227481635.0, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.40987424312994875, |
|
"grad_norm": 0.4916243745680279, |
|
"learning_rate": 4.796341042457715e-05, |
|
"loss": 0.3006, |
|
"num_tokens": 230103075.0, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.4145319049836982, |
|
"grad_norm": 0.4402270249594231, |
|
"learning_rate": 4.787711425612703e-05, |
|
"loss": 0.2878, |
|
"num_tokens": 232689331.0, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4191895668374476, |
|
"grad_norm": 0.5097510648484884, |
|
"learning_rate": 4.779081808767691e-05, |
|
"loss": 0.2912, |
|
"num_tokens": 235301283.0, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.423847228691197, |
|
"grad_norm": 0.4733020023413381, |
|
"learning_rate": 4.770452191922679e-05, |
|
"loss": 0.3042, |
|
"num_tokens": 237900010.0, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.42850489054494645, |
|
"grad_norm": 0.4182733746030012, |
|
"learning_rate": 4.761822575077667e-05, |
|
"loss": 0.2927, |
|
"num_tokens": 240521450.0, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.43316255239869583, |
|
"grad_norm": 0.433165710595746, |
|
"learning_rate": 4.753192958232654e-05, |
|
"loss": 0.2928, |
|
"num_tokens": 243110250.0, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.43782021425244527, |
|
"grad_norm": 0.3999472466997859, |
|
"learning_rate": 4.744563341387643e-05, |
|
"loss": 0.2959, |
|
"num_tokens": 245731690.0, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4424778761061947, |
|
"grad_norm": 0.4401937042106931, |
|
"learning_rate": 4.7359337245426306e-05, |
|
"loss": 0.3011, |
|
"num_tokens": 248353130.0, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4471355379599441, |
|
"grad_norm": 0.39463363710081184, |
|
"learning_rate": 4.7273041076976184e-05, |
|
"loss": 0.295, |
|
"num_tokens": 250974570.0, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.45179319981369354, |
|
"grad_norm": 0.369259633414095, |
|
"learning_rate": 4.718674490852606e-05, |
|
"loss": 0.2974, |
|
"num_tokens": 253596010.0, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.4564508616674429, |
|
"grad_norm": 0.42451110897649086, |
|
"learning_rate": 4.710044874007594e-05, |
|
"loss": 0.3037, |
|
"num_tokens": 256216527.0, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.46110852352119236, |
|
"grad_norm": 0.41192640646628453, |
|
"learning_rate": 4.7014152571625826e-05, |
|
"loss": 0.3106, |
|
"num_tokens": 258829080.0, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4657661853749418, |
|
"grad_norm": 0.4565762913092404, |
|
"learning_rate": 4.6927856403175704e-05, |
|
"loss": 0.3355, |
|
"num_tokens": 261450520.0, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4704238472286912, |
|
"grad_norm": 0.3565391494715242, |
|
"learning_rate": 4.684156023472558e-05, |
|
"loss": 0.3478, |
|
"num_tokens": 264063793.0, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.4750815090824406, |
|
"grad_norm": 0.3391571716677179, |
|
"learning_rate": 4.675526406627546e-05, |
|
"loss": 0.3465, |
|
"num_tokens": 266677642.0, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.47973917093619, |
|
"grad_norm": 0.3752589674406903, |
|
"learning_rate": 4.666896789782534e-05, |
|
"loss": 0.3548, |
|
"num_tokens": 269299082.0, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.48439683278993945, |
|
"grad_norm": 0.4003513088677163, |
|
"learning_rate": 4.658267172937522e-05, |
|
"loss": 0.3502, |
|
"num_tokens": 271920522.0, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4890544946436889, |
|
"grad_norm": 0.37773061821039194, |
|
"learning_rate": 4.64963755609251e-05, |
|
"loss": 0.3572, |
|
"num_tokens": 274541962.0, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.49371215649743827, |
|
"grad_norm": 0.3578568943439884, |
|
"learning_rate": 4.641007939247497e-05, |
|
"loss": 0.3464, |
|
"num_tokens": 277161966.0, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4983698183511877, |
|
"grad_norm": 0.3506179208934016, |
|
"learning_rate": 4.632378322402486e-05, |
|
"loss": 0.3513, |
|
"num_tokens": 279777840.0, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5030274802049371, |
|
"grad_norm": 0.3972896159155102, |
|
"learning_rate": 4.6237487055574736e-05, |
|
"loss": 0.3545, |
|
"num_tokens": 282399280.0, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5076851420586865, |
|
"grad_norm": 0.4223297300478965, |
|
"learning_rate": 4.6151190887124615e-05, |
|
"loss": 0.345, |
|
"num_tokens": 284959243.0, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5123428039124359, |
|
"grad_norm": 0.40153010744650247, |
|
"learning_rate": 4.606489471867449e-05, |
|
"loss": 0.3596, |
|
"num_tokens": 287580360.0, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5170004657661854, |
|
"grad_norm": 0.4154707265721631, |
|
"learning_rate": 4.597859855022437e-05, |
|
"loss": 0.3506, |
|
"num_tokens": 290201800.0, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5216581276199348, |
|
"grad_norm": 0.4455642719609879, |
|
"learning_rate": 4.589230238177425e-05, |
|
"loss": 0.3528, |
|
"num_tokens": 292817720.0, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5263157894736842, |
|
"grad_norm": 0.3997933363319544, |
|
"learning_rate": 4.5806006213324134e-05, |
|
"loss": 0.3519, |
|
"num_tokens": 295439160.0, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5309734513274337, |
|
"grad_norm": 0.31094395987853973, |
|
"learning_rate": 4.5719710044874006e-05, |
|
"loss": 0.3508, |
|
"num_tokens": 298031803.0, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5356311131811831, |
|
"grad_norm": 0.309211035269928, |
|
"learning_rate": 4.563341387642389e-05, |
|
"loss": 0.3489, |
|
"num_tokens": 300653243.0, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5402887750349324, |
|
"grad_norm": 0.359619218023151, |
|
"learning_rate": 4.554711770797377e-05, |
|
"loss": 0.3471, |
|
"num_tokens": 303225503.0, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5449464368886818, |
|
"grad_norm": 0.34255420079750104, |
|
"learning_rate": 4.546082153952365e-05, |
|
"loss": 0.3476, |
|
"num_tokens": 305846943.0, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5496040987424313, |
|
"grad_norm": 0.40542182146127437, |
|
"learning_rate": 4.5374525371073526e-05, |
|
"loss": 0.3606, |
|
"num_tokens": 308468383.0, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5542617605961807, |
|
"grad_norm": 0.4491850870380039, |
|
"learning_rate": 4.5288229202623404e-05, |
|
"loss": 0.3535, |
|
"num_tokens": 311089823.0, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5589194224499301, |
|
"grad_norm": 0.4501517095082673, |
|
"learning_rate": 4.520193303417328e-05, |
|
"loss": 0.3552, |
|
"num_tokens": 313711263.0, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5635770843036796, |
|
"grad_norm": 0.3635512034769944, |
|
"learning_rate": 4.511563686572317e-05, |
|
"loss": 0.3365, |
|
"num_tokens": 316309964.0, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.568234746157429, |
|
"grad_norm": 0.34483733371492037, |
|
"learning_rate": 4.5029340697273045e-05, |
|
"loss": 0.3499, |
|
"num_tokens": 318931404.0, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5728924080111784, |
|
"grad_norm": 0.3572559367192021, |
|
"learning_rate": 4.4943044528822923e-05, |
|
"loss": 0.3463, |
|
"num_tokens": 321552844.0, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5775500698649279, |
|
"grad_norm": 0.3436731405392265, |
|
"learning_rate": 4.48567483603728e-05, |
|
"loss": 0.3508, |
|
"num_tokens": 324174284.0, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5822077317186772, |
|
"grad_norm": 0.5303742013030021, |
|
"learning_rate": 4.477045219192268e-05, |
|
"loss": 0.3568, |
|
"num_tokens": 326795724.0, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5868653935724266, |
|
"grad_norm": 0.40428231280479165, |
|
"learning_rate": 4.4684156023472565e-05, |
|
"loss": 0.3463, |
|
"num_tokens": 329417164.0, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5915230554261761, |
|
"grad_norm": 0.3432839299493264, |
|
"learning_rate": 4.4597859855022436e-05, |
|
"loss": 0.3465, |
|
"num_tokens": 332014390.0, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5961807172799255, |
|
"grad_norm": 0.4220791189143345, |
|
"learning_rate": 4.4511563686572315e-05, |
|
"loss": 0.3508, |
|
"num_tokens": 334628125.0, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.6008383791336749, |
|
"grad_norm": 0.4586942895958512, |
|
"learning_rate": 4.44252675181222e-05, |
|
"loss": 0.3488, |
|
"num_tokens": 337230275.0, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6054960409874243, |
|
"grad_norm": 0.3714109798341085, |
|
"learning_rate": 4.433897134967208e-05, |
|
"loss": 0.3499, |
|
"num_tokens": 339850764.0, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6101537028411738, |
|
"grad_norm": 0.4008926116406693, |
|
"learning_rate": 4.4252675181221956e-05, |
|
"loss": 0.3541, |
|
"num_tokens": 342472204.0, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.6148113646949231, |
|
"grad_norm": 0.34765377356863303, |
|
"learning_rate": 4.4166379012771834e-05, |
|
"loss": 0.3543, |
|
"num_tokens": 345093644.0, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6194690265486725, |
|
"grad_norm": 0.32332322072308267, |
|
"learning_rate": 4.408008284432171e-05, |
|
"loss": 0.3485, |
|
"num_tokens": 347686320.0, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.624126688402422, |
|
"grad_norm": 0.35580436954639033, |
|
"learning_rate": 4.39937866758716e-05, |
|
"loss": 0.3524, |
|
"num_tokens": 350307760.0, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.6287843502561714, |
|
"grad_norm": 0.4333129378651075, |
|
"learning_rate": 4.3907490507421476e-05, |
|
"loss": 0.3479, |
|
"num_tokens": 352915502.0, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.6334420121099208, |
|
"grad_norm": 0.33390835131596075, |
|
"learning_rate": 4.382119433897135e-05, |
|
"loss": 0.3494, |
|
"num_tokens": 355507604.0, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6380996739636703, |
|
"grad_norm": 0.357173329760582, |
|
"learning_rate": 4.373489817052123e-05, |
|
"loss": 0.3576, |
|
"num_tokens": 358102620.0, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6427573358174197, |
|
"grad_norm": 0.355905703126643, |
|
"learning_rate": 4.364860200207111e-05, |
|
"loss": 0.3508, |
|
"num_tokens": 360724060.0, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6474149976711691, |
|
"grad_norm": 0.3923025321797665, |
|
"learning_rate": 4.356230583362099e-05, |
|
"loss": 0.3512, |
|
"num_tokens": 363345500.0, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6520726595249184, |
|
"grad_norm": 0.4615340443716854, |
|
"learning_rate": 4.347600966517087e-05, |
|
"loss": 0.3488, |
|
"num_tokens": 365965454.0, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6567303213786679, |
|
"grad_norm": 0.3508078022803716, |
|
"learning_rate": 4.3389713496720745e-05, |
|
"loss": 0.3466, |
|
"num_tokens": 368566578.0, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6613879832324173, |
|
"grad_norm": 0.39418653114960156, |
|
"learning_rate": 4.330341732827063e-05, |
|
"loss": 0.3554, |
|
"num_tokens": 371168165.0, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6660456450861667, |
|
"grad_norm": 0.5091202912446522, |
|
"learning_rate": 4.321712115982051e-05, |
|
"loss": 0.3512, |
|
"num_tokens": 373789605.0, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6707033069399162, |
|
"grad_norm": 0.35769440707050915, |
|
"learning_rate": 4.3130824991370387e-05, |
|
"loss": 0.3475, |
|
"num_tokens": 376376000.0, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6753609687936656, |
|
"grad_norm": 0.30703298011124236, |
|
"learning_rate": 4.3044528822920265e-05, |
|
"loss": 0.3529, |
|
"num_tokens": 378997440.0, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.680018630647415, |
|
"grad_norm": 0.40296087619428983, |
|
"learning_rate": 4.295823265447014e-05, |
|
"loss": 0.3561, |
|
"num_tokens": 381618880.0, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6846762925011645, |
|
"grad_norm": 0.3305890378890626, |
|
"learning_rate": 4.287193648602002e-05, |
|
"loss": 0.3451, |
|
"num_tokens": 384240320.0, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6893339543549138, |
|
"grad_norm": 0.4068405386758603, |
|
"learning_rate": 4.27856403175699e-05, |
|
"loss": 0.348, |
|
"num_tokens": 386861760.0, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6939916162086632, |
|
"grad_norm": 0.3324914070368526, |
|
"learning_rate": 4.269934414911978e-05, |
|
"loss": 0.3484, |
|
"num_tokens": 389483200.0, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6986492780624126, |
|
"grad_norm": 0.38594952569803137, |
|
"learning_rate": 4.261304798066966e-05, |
|
"loss": 0.3531, |
|
"num_tokens": 392104640.0, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.7033069399161621, |
|
"grad_norm": 0.3904186621380532, |
|
"learning_rate": 4.252675181221954e-05, |
|
"loss": 0.3445, |
|
"num_tokens": 394726080.0, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.7079646017699115, |
|
"grad_norm": 0.34862987122765965, |
|
"learning_rate": 4.244045564376942e-05, |
|
"loss": 0.3456, |
|
"num_tokens": 397347520.0, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.7126222636236609, |
|
"grad_norm": 0.31776518778099616, |
|
"learning_rate": 4.23541594753193e-05, |
|
"loss": 0.3523, |
|
"num_tokens": 399968838.0, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.7172799254774104, |
|
"grad_norm": 0.4042167640996357, |
|
"learning_rate": 4.2267863306869176e-05, |
|
"loss": 0.3533, |
|
"num_tokens": 402590278.0, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7219375873311598, |
|
"grad_norm": 0.48879019149496866, |
|
"learning_rate": 4.2181567138419054e-05, |
|
"loss": 0.3438, |
|
"num_tokens": 405198917.0, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7265952491849091, |
|
"grad_norm": 0.4174350631641972, |
|
"learning_rate": 4.209527096996894e-05, |
|
"loss": 0.3563, |
|
"num_tokens": 407782773.0, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7312529110386586, |
|
"grad_norm": 0.2959354186783129, |
|
"learning_rate": 4.200897480151881e-05, |
|
"loss": 0.3518, |
|
"num_tokens": 410401323.0, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.735910572892408, |
|
"grad_norm": 0.3538958333784061, |
|
"learning_rate": 4.1922678633068695e-05, |
|
"loss": 0.3463, |
|
"num_tokens": 413022763.0, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7405682347461574, |
|
"grad_norm": 0.3209486833709415, |
|
"learning_rate": 4.1836382464618573e-05, |
|
"loss": 0.3505, |
|
"num_tokens": 415622538.0, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7452258965999069, |
|
"grad_norm": 0.32824211239120443, |
|
"learning_rate": 4.175008629616845e-05, |
|
"loss": 0.3492, |
|
"num_tokens": 418243978.0, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.7498835584536563, |
|
"grad_norm": 0.3388544675623175, |
|
"learning_rate": 4.166379012771833e-05, |
|
"loss": 0.3596, |
|
"num_tokens": 420835113.0, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7545412203074057, |
|
"grad_norm": 0.35049650782126746, |
|
"learning_rate": 4.157749395926821e-05, |
|
"loss": 0.3492, |
|
"num_tokens": 423445640.0, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.759198882161155, |
|
"grad_norm": 0.43085749480945107, |
|
"learning_rate": 4.1491197790818086e-05, |
|
"loss": 0.3516, |
|
"num_tokens": 426038048.0, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7638565440149045, |
|
"grad_norm": 0.3800016220245255, |
|
"learning_rate": 4.140490162236797e-05, |
|
"loss": 0.3536, |
|
"num_tokens": 428659488.0, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7685142058686539, |
|
"grad_norm": 0.40534919951359066, |
|
"learning_rate": 4.131860545391785e-05, |
|
"loss": 0.3458, |
|
"num_tokens": 431280928.0, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7731718677224033, |
|
"grad_norm": 0.3282502514421546, |
|
"learning_rate": 4.123230928546773e-05, |
|
"loss": 0.351, |
|
"num_tokens": 433902368.0, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7778295295761528, |
|
"grad_norm": 0.3425108014869088, |
|
"learning_rate": 4.1146013117017606e-05, |
|
"loss": 0.3564, |
|
"num_tokens": 436522664.0, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7824871914299022, |
|
"grad_norm": 0.33361835217876357, |
|
"learning_rate": 4.1059716948567484e-05, |
|
"loss": 0.3433, |
|
"num_tokens": 439124783.0, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7871448532836516, |
|
"grad_norm": 0.33798159635466163, |
|
"learning_rate": 4.097342078011737e-05, |
|
"loss": 0.3496, |
|
"num_tokens": 441746223.0, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7918025151374011, |
|
"grad_norm": 0.447347998974799, |
|
"learning_rate": 4.088712461166724e-05, |
|
"loss": 0.351, |
|
"num_tokens": 444367663.0, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7964601769911505, |
|
"grad_norm": 0.3525745631003211, |
|
"learning_rate": 4.080082844321712e-05, |
|
"loss": 0.3457, |
|
"num_tokens": 446989103.0, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.8011178388448998, |
|
"grad_norm": 0.31916567647993294, |
|
"learning_rate": 4.0714532274767004e-05, |
|
"loss": 0.3451, |
|
"num_tokens": 449590185.0, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.8057755006986492, |
|
"grad_norm": 0.318983153886092, |
|
"learning_rate": 4.062823610631688e-05, |
|
"loss": 0.3485, |
|
"num_tokens": 452202740.0, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.8104331625523987, |
|
"grad_norm": 0.29667088926699325, |
|
"learning_rate": 4.054193993786676e-05, |
|
"loss": 0.3444, |
|
"num_tokens": 454824180.0, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.8150908244061481, |
|
"grad_norm": 0.31207562700596997, |
|
"learning_rate": 4.045564376941664e-05, |
|
"loss": 0.3517, |
|
"num_tokens": 457445620.0, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.8197484862598975, |
|
"grad_norm": 0.34870985935788257, |
|
"learning_rate": 4.036934760096652e-05, |
|
"loss": 0.3394, |
|
"num_tokens": 460067060.0, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.824406148113647, |
|
"grad_norm": 0.30388580216897826, |
|
"learning_rate": 4.02830514325164e-05, |
|
"loss": 0.3507, |
|
"num_tokens": 462674172.0, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8290638099673964, |
|
"grad_norm": 0.32472261917860645, |
|
"learning_rate": 4.019675526406628e-05, |
|
"loss": 0.3501, |
|
"num_tokens": 465295612.0, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.8337214718211458, |
|
"grad_norm": 0.3285171952667728, |
|
"learning_rate": 4.011045909561615e-05, |
|
"loss": 0.3504, |
|
"num_tokens": 467917052.0, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8383791336748952, |
|
"grad_norm": 0.3323900078743967, |
|
"learning_rate": 4.0024162927166037e-05, |
|
"loss": 0.3471, |
|
"num_tokens": 470538492.0, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8430367955286446, |
|
"grad_norm": 0.32807804825654646, |
|
"learning_rate": 3.9937866758715915e-05, |
|
"loss": 0.3511, |
|
"num_tokens": 473159932.0, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.847694457382394, |
|
"grad_norm": 0.28919453745593604, |
|
"learning_rate": 3.98515705902658e-05, |
|
"loss": 0.3545, |
|
"num_tokens": 475757711.0, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8523521192361434, |
|
"grad_norm": 0.46516583490606656, |
|
"learning_rate": 3.976527442181567e-05, |
|
"loss": 0.3487, |
|
"num_tokens": 478352124.0, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8570097810898929, |
|
"grad_norm": 0.41404074008661385, |
|
"learning_rate": 3.967897825336555e-05, |
|
"loss": 0.3544, |
|
"num_tokens": 480973564.0, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8616674429436423, |
|
"grad_norm": 0.363700040967719, |
|
"learning_rate": 3.9592682084915434e-05, |
|
"loss": 0.35, |
|
"num_tokens": 483595004.0, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8663251047973917, |
|
"grad_norm": 0.42236484445152317, |
|
"learning_rate": 3.950638591646531e-05, |
|
"loss": 0.3531, |
|
"num_tokens": 486216444.0, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8709827666511412, |
|
"grad_norm": 0.4078726261905896, |
|
"learning_rate": 3.942008974801519e-05, |
|
"loss": 0.3411, |
|
"num_tokens": 488837884.0, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8756404285048905, |
|
"grad_norm": 0.3918480578581464, |
|
"learning_rate": 3.933379357956507e-05, |
|
"loss": 0.3441, |
|
"num_tokens": 491454413.0, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8802980903586399, |
|
"grad_norm": 0.3170963095548561, |
|
"learning_rate": 3.924749741111495e-05, |
|
"loss": 0.3456, |
|
"num_tokens": 494064784.0, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8849557522123894, |
|
"grad_norm": 0.3208860567524943, |
|
"learning_rate": 3.916120124266483e-05, |
|
"loss": 0.338, |
|
"num_tokens": 496686224.0, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8896134140661388, |
|
"grad_norm": 0.3597661575787414, |
|
"learning_rate": 3.9074905074214704e-05, |
|
"loss": 0.3494, |
|
"num_tokens": 499307664.0, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8942710759198882, |
|
"grad_norm": 0.43417260548610914, |
|
"learning_rate": 3.898860890576458e-05, |
|
"loss": 0.3528, |
|
"num_tokens": 501920213.0, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8989287377736377, |
|
"grad_norm": 0.31106495064645334, |
|
"learning_rate": 3.890231273731447e-05, |
|
"loss": 0.3498, |
|
"num_tokens": 504541653.0, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.9035863996273871, |
|
"grad_norm": 0.35221034421913827, |
|
"learning_rate": 3.8816016568864345e-05, |
|
"loss": 0.353, |
|
"num_tokens": 507163093.0, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.9082440614811365, |
|
"grad_norm": 0.33006907184172946, |
|
"learning_rate": 3.8729720400414224e-05, |
|
"loss": 0.3495, |
|
"num_tokens": 509784533.0, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.9129017233348858, |
|
"grad_norm": 0.463640975856773, |
|
"learning_rate": 3.86434242319641e-05, |
|
"loss": 0.3554, |
|
"num_tokens": 512405973.0, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.9175593851886353, |
|
"grad_norm": 0.27816047488495105, |
|
"learning_rate": 3.855712806351398e-05, |
|
"loss": 0.3631, |
|
"num_tokens": 515027413.0, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.9222170470423847, |
|
"grad_norm": 2.168338544722683, |
|
"learning_rate": 3.8470831895063865e-05, |
|
"loss": 0.3456, |
|
"num_tokens": 517639572.0, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9268747088961341, |
|
"grad_norm": 0.38328693899390504, |
|
"learning_rate": 3.838453572661374e-05, |
|
"loss": 0.3711, |
|
"num_tokens": 520261012.0, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9315323707498836, |
|
"grad_norm": 0.4244503538190939, |
|
"learning_rate": 3.8298239558163615e-05, |
|
"loss": 0.3914, |
|
"num_tokens": 522882452.0, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.936190032603633, |
|
"grad_norm": 0.591471122073177, |
|
"learning_rate": 3.82119433897135e-05, |
|
"loss": 0.4168, |
|
"num_tokens": 525503892.0, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.9408476944573824, |
|
"grad_norm": 0.37635159444985006, |
|
"learning_rate": 3.812564722126338e-05, |
|
"loss": 0.407, |
|
"num_tokens": 528125332.0, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.9455053563111319, |
|
"grad_norm": 0.41163572473058424, |
|
"learning_rate": 3.8039351052813256e-05, |
|
"loss": 0.4084, |
|
"num_tokens": 530746772.0, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.9501630181648812, |
|
"grad_norm": 0.3843982365118871, |
|
"learning_rate": 3.7953054884363134e-05, |
|
"loss": 0.4162, |
|
"num_tokens": 533368212.0, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9548206800186306, |
|
"grad_norm": 0.4651466028200984, |
|
"learning_rate": 3.786675871591301e-05, |
|
"loss": 0.4091, |
|
"num_tokens": 535989652.0, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.95947834187238, |
|
"grad_norm": 0.32589812875265384, |
|
"learning_rate": 3.77804625474629e-05, |
|
"loss": 0.4003, |
|
"num_tokens": 538611092.0, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.9641360037261295, |
|
"grad_norm": 0.3671450936074502, |
|
"learning_rate": 3.7694166379012776e-05, |
|
"loss": 0.4032, |
|
"num_tokens": 541232532.0, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.9687936655798789, |
|
"grad_norm": 0.30799336780124387, |
|
"learning_rate": 3.7607870210562654e-05, |
|
"loss": 0.403, |
|
"num_tokens": 543853972.0, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.9734513274336283, |
|
"grad_norm": 0.2683570971972702, |
|
"learning_rate": 3.752157404211253e-05, |
|
"loss": 0.4033, |
|
"num_tokens": 546461523.0, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.9781089892873778, |
|
"grad_norm": 0.3238139917000379, |
|
"learning_rate": 3.743527787366241e-05, |
|
"loss": 0.4027, |
|
"num_tokens": 549082963.0, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9827666511411272, |
|
"grad_norm": 0.4169093587901815, |
|
"learning_rate": 3.734898170521229e-05, |
|
"loss": 0.4075, |
|
"num_tokens": 551662348.0, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9874243129948765, |
|
"grad_norm": 0.29918767290268117, |
|
"learning_rate": 3.7262685536762174e-05, |
|
"loss": 0.4062, |
|
"num_tokens": 554283788.0, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.992081974848626, |
|
"grad_norm": 0.2831248421544475, |
|
"learning_rate": 3.7176389368312045e-05, |
|
"loss": 0.4042, |
|
"num_tokens": 556905228.0, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9967396367023754, |
|
"grad_norm": 0.322460754267741, |
|
"learning_rate": 3.709009319986193e-05, |
|
"loss": 0.3957, |
|
"num_tokens": 559526668.0, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.00093153237075, |
|
"grad_norm": 0.5158478203711624, |
|
"learning_rate": 3.700379703141181e-05, |
|
"loss": 0.3987, |
|
"num_tokens": 561722124.0, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.0055891942244992, |
|
"grad_norm": 0.3842474431873255, |
|
"learning_rate": 3.6917500862961687e-05, |
|
"loss": 0.2967, |
|
"num_tokens": 564312816.0, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.0102468560782487, |
|
"grad_norm": 0.30902058029027224, |
|
"learning_rate": 3.6831204694511565e-05, |
|
"loss": 0.2768, |
|
"num_tokens": 566934256.0, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.0149045179319982, |
|
"grad_norm": 0.27843760124356054, |
|
"learning_rate": 3.674490852606144e-05, |
|
"loss": 0.2817, |
|
"num_tokens": 569555696.0, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.0195621797857475, |
|
"grad_norm": 0.29052535529866214, |
|
"learning_rate": 3.665861235761132e-05, |
|
"loss": 0.276, |
|
"num_tokens": 572156684.0, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.024219841639497, |
|
"grad_norm": 0.27630310315466183, |
|
"learning_rate": 3.6572316189161206e-05, |
|
"loss": 0.2797, |
|
"num_tokens": 574749092.0, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.0288775034932465, |
|
"grad_norm": 0.278687574814992, |
|
"learning_rate": 3.6486020020711085e-05, |
|
"loss": 0.2787, |
|
"num_tokens": 577370532.0, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.0335351653469957, |
|
"grad_norm": 0.23131688150393118, |
|
"learning_rate": 3.639972385226096e-05, |
|
"loss": 0.2818, |
|
"num_tokens": 579991972.0, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.0381928272007452, |
|
"grad_norm": 0.29831078797973526, |
|
"learning_rate": 3.631342768381084e-05, |
|
"loss": 0.2685, |
|
"num_tokens": 582591688.0, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.0428504890544947, |
|
"grad_norm": 0.2527756012565954, |
|
"learning_rate": 3.622713151536072e-05, |
|
"loss": 0.2788, |
|
"num_tokens": 585213128.0, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.047508150908244, |
|
"grad_norm": 0.25154350991263563, |
|
"learning_rate": 3.6140835346910604e-05, |
|
"loss": 0.2793, |
|
"num_tokens": 587834568.0, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.0521658127619935, |
|
"grad_norm": 0.2850095798638657, |
|
"learning_rate": 3.6054539178460476e-05, |
|
"loss": 0.2657, |
|
"num_tokens": 590415436.0, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.056823474615743, |
|
"grad_norm": 0.31180404727290206, |
|
"learning_rate": 3.5968243010010354e-05, |
|
"loss": 0.276, |
|
"num_tokens": 593036876.0, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.0614811364694923, |
|
"grad_norm": 0.3349544289972451, |
|
"learning_rate": 3.588194684156024e-05, |
|
"loss": 0.2783, |
|
"num_tokens": 595658316.0, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.0661387983232418, |
|
"grad_norm": 0.28515849317064546, |
|
"learning_rate": 3.579565067311012e-05, |
|
"loss": 0.2747, |
|
"num_tokens": 598248306.0, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.0707964601769913, |
|
"grad_norm": 0.2558473515039951, |
|
"learning_rate": 3.5709354504659995e-05, |
|
"loss": 0.2783, |
|
"num_tokens": 600869746.0, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.0754541220307405, |
|
"grad_norm": 0.3055750325974408, |
|
"learning_rate": 3.5623058336209874e-05, |
|
"loss": 0.2827, |
|
"num_tokens": 603469740.0, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.08011178388449, |
|
"grad_norm": 0.2523023068708641, |
|
"learning_rate": 3.553676216775975e-05, |
|
"loss": 0.2915, |
|
"num_tokens": 606032296.0, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.0847694457382393, |
|
"grad_norm": 0.26982046820141947, |
|
"learning_rate": 3.545046599930964e-05, |
|
"loss": 0.2833, |
|
"num_tokens": 608640980.0, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.0894271075919888, |
|
"grad_norm": 0.29368512739305636, |
|
"learning_rate": 3.536416983085951e-05, |
|
"loss": 0.2795, |
|
"num_tokens": 611262420.0, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.0940847694457383, |
|
"grad_norm": 0.3772826473034449, |
|
"learning_rate": 3.5277873662409386e-05, |
|
"loss": 0.2804, |
|
"num_tokens": 613883860.0, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.0987424312994876, |
|
"grad_norm": 0.3154485480529681, |
|
"learning_rate": 3.519157749395927e-05, |
|
"loss": 0.2735, |
|
"num_tokens": 616486010.0, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.103400093153237, |
|
"grad_norm": 0.26976291529898727, |
|
"learning_rate": 3.510528132550915e-05, |
|
"loss": 0.2769, |
|
"num_tokens": 619107450.0, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.1080577550069866, |
|
"grad_norm": 0.2773634956709149, |
|
"learning_rate": 3.501898515705903e-05, |
|
"loss": 0.2616, |
|
"num_tokens": 621728890.0, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.1127154168607358, |
|
"grad_norm": 0.30021623861010804, |
|
"learning_rate": 3.4932688988608906e-05, |
|
"loss": 0.2706, |
|
"num_tokens": 624309344.0, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.1173730787144853, |
|
"grad_norm": 0.2609207885474365, |
|
"learning_rate": 3.4846392820158784e-05, |
|
"loss": 0.2864, |
|
"num_tokens": 626919167.0, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.1220307405682348, |
|
"grad_norm": 0.33890867374743183, |
|
"learning_rate": 3.476009665170867e-05, |
|
"loss": 0.2857, |
|
"num_tokens": 629531326.0, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.126688402421984, |
|
"grad_norm": 0.33533079561986484, |
|
"learning_rate": 3.467380048325855e-05, |
|
"loss": 0.2757, |
|
"num_tokens": 632135386.0, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.1313460642757336, |
|
"grad_norm": 0.2789080559095753, |
|
"learning_rate": 3.458750431480842e-05, |
|
"loss": 0.2699, |
|
"num_tokens": 634747941.0, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.136003726129483, |
|
"grad_norm": 0.2698082442225666, |
|
"learning_rate": 3.4501208146358304e-05, |
|
"loss": 0.282, |
|
"num_tokens": 637344492.0, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.1406613879832324, |
|
"grad_norm": 0.29152261058658296, |
|
"learning_rate": 3.441491197790818e-05, |
|
"loss": 0.2673, |
|
"num_tokens": 639965932.0, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.1453190498369819, |
|
"grad_norm": 0.26522717303501175, |
|
"learning_rate": 3.432861580945806e-05, |
|
"loss": 0.2757, |
|
"num_tokens": 642587372.0, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.1499767116907313, |
|
"grad_norm": 0.2901549255370733, |
|
"learning_rate": 3.424231964100794e-05, |
|
"loss": 0.2728, |
|
"num_tokens": 645178768.0, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.1546343735444806, |
|
"grad_norm": 0.3068102140084383, |
|
"learning_rate": 3.415602347255782e-05, |
|
"loss": 0.2786, |
|
"num_tokens": 647793774.0, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.1592920353982301, |
|
"grad_norm": 0.2370803484062004, |
|
"learning_rate": 3.40697273041077e-05, |
|
"loss": 0.2884, |
|
"num_tokens": 650415214.0, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.1639496972519794, |
|
"grad_norm": 0.28521208376945334, |
|
"learning_rate": 3.398343113565758e-05, |
|
"loss": 0.2807, |
|
"num_tokens": 653036654.0, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.1686073591057289, |
|
"grad_norm": 0.26932885849381305, |
|
"learning_rate": 3.389713496720746e-05, |
|
"loss": 0.2763, |
|
"num_tokens": 655624923.0, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.1732650209594784, |
|
"grad_norm": 0.2538805120966557, |
|
"learning_rate": 3.381083879875734e-05, |
|
"loss": 0.2806, |
|
"num_tokens": 658246363.0, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.1779226828132279, |
|
"grad_norm": 0.243785221427234, |
|
"learning_rate": 3.3724542630307215e-05, |
|
"loss": 0.2792, |
|
"num_tokens": 660864764.0, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.1825803446669771, |
|
"grad_norm": 0.35726148914420935, |
|
"learning_rate": 3.363824646185709e-05, |
|
"loss": 0.2695, |
|
"num_tokens": 663461223.0, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.1872380065207266, |
|
"grad_norm": 0.3062285105387582, |
|
"learning_rate": 3.355195029340698e-05, |
|
"loss": 0.2862, |
|
"num_tokens": 666082663.0, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.191895668374476, |
|
"grad_norm": 0.2965179427739923, |
|
"learning_rate": 3.346565412495685e-05, |
|
"loss": 0.2743, |
|
"num_tokens": 668704103.0, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.1965533302282254, |
|
"grad_norm": 0.31841050941054977, |
|
"learning_rate": 3.3379357956506735e-05, |
|
"loss": 0.2852, |
|
"num_tokens": 671325543.0, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.201210992081975, |
|
"grad_norm": 0.2668801197416085, |
|
"learning_rate": 3.329306178805661e-05, |
|
"loss": 0.2726, |
|
"num_tokens": 673938014.0, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.2058686539357242, |
|
"grad_norm": 0.2619716412853699, |
|
"learning_rate": 3.320676561960649e-05, |
|
"loss": 0.2832, |
|
"num_tokens": 676559454.0, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.2105263157894737, |
|
"grad_norm": 0.27075658128215524, |
|
"learning_rate": 3.312046945115637e-05, |
|
"loss": 0.2796, |
|
"num_tokens": 679180894.0, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.2151839776432232, |
|
"grad_norm": 0.3034321823924177, |
|
"learning_rate": 3.303417328270625e-05, |
|
"loss": 0.2901, |
|
"num_tokens": 681778120.0, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.2198416394969724, |
|
"grad_norm": 0.28559797634175, |
|
"learning_rate": 3.2947877114256126e-05, |
|
"loss": 0.2865, |
|
"num_tokens": 684399560.0, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.224499301350722, |
|
"grad_norm": 0.2620025012126218, |
|
"learning_rate": 3.286158094580601e-05, |
|
"loss": 0.2764, |
|
"num_tokens": 687021000.0, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.2291569632044714, |
|
"grad_norm": 0.3065365336854166, |
|
"learning_rate": 3.277528477735589e-05, |
|
"loss": 0.283, |
|
"num_tokens": 689642440.0, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.2338146250582207, |
|
"grad_norm": 0.31061841933478607, |
|
"learning_rate": 3.268898860890577e-05, |
|
"loss": 0.2865, |
|
"num_tokens": 692242175.0, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.2384722869119702, |
|
"grad_norm": 0.30651408147568526, |
|
"learning_rate": 3.2602692440455645e-05, |
|
"loss": 0.2836, |
|
"num_tokens": 694863615.0, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.2431299487657197, |
|
"grad_norm": 0.31688698207820437, |
|
"learning_rate": 3.2516396272005524e-05, |
|
"loss": 0.2778, |
|
"num_tokens": 697459919.0, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.247787610619469, |
|
"grad_norm": 0.31942806223486203, |
|
"learning_rate": 3.243010010355541e-05, |
|
"loss": 0.2799, |
|
"num_tokens": 700081359.0, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.2524452724732185, |
|
"grad_norm": 0.31353101869290123, |
|
"learning_rate": 3.234380393510528e-05, |
|
"loss": 0.2657, |
|
"num_tokens": 702689027.0, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.257102934326968, |
|
"grad_norm": 0.32712994813622637, |
|
"learning_rate": 3.225750776665516e-05, |
|
"loss": 0.2824, |
|
"num_tokens": 705310467.0, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.2617605961807172, |
|
"grad_norm": 0.25649502957591136, |
|
"learning_rate": 3.217121159820504e-05, |
|
"loss": 0.275, |
|
"num_tokens": 707931907.0, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.2664182580344667, |
|
"grad_norm": 0.27787099637497753, |
|
"learning_rate": 3.208491542975492e-05, |
|
"loss": 0.2746, |
|
"num_tokens": 710553347.0, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.271075919888216, |
|
"grad_norm": 0.28878789150868767, |
|
"learning_rate": 3.19986192613048e-05, |
|
"loss": 0.2792, |
|
"num_tokens": 713174787.0, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.2757335817419655, |
|
"grad_norm": 0.257307550138807, |
|
"learning_rate": 3.191232309285468e-05, |
|
"loss": 0.2801, |
|
"num_tokens": 715796227.0, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.280391243595715, |
|
"grad_norm": 0.23609227989037512, |
|
"learning_rate": 3.1826026924404556e-05, |
|
"loss": 0.2793, |
|
"num_tokens": 718417667.0, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.2850489054494645, |
|
"grad_norm": 0.2463031753879305, |
|
"learning_rate": 3.173973075595444e-05, |
|
"loss": 0.2859, |
|
"num_tokens": 721039107.0, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.2897065673032138, |
|
"grad_norm": 0.2722779089462647, |
|
"learning_rate": 3.165343458750431e-05, |
|
"loss": 0.2766, |
|
"num_tokens": 723660547.0, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.2943642291569633, |
|
"grad_norm": 0.3178746369585837, |
|
"learning_rate": 3.156713841905419e-05, |
|
"loss": 0.2766, |
|
"num_tokens": 726281987.0, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.2990218910107125, |
|
"grad_norm": 0.30536089345920436, |
|
"learning_rate": 3.1480842250604076e-05, |
|
"loss": 0.2929, |
|
"num_tokens": 728895260.0, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.303679552864462, |
|
"grad_norm": 0.323563907379548, |
|
"learning_rate": 3.1394546082153954e-05, |
|
"loss": 0.2757, |
|
"num_tokens": 731507815.0, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.3083372147182115, |
|
"grad_norm": 0.29136024043207664, |
|
"learning_rate": 3.130824991370383e-05, |
|
"loss": 0.2691, |
|
"num_tokens": 734108897.0, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.312994876571961, |
|
"grad_norm": 0.29970041422727933, |
|
"learning_rate": 3.122195374525371e-05, |
|
"loss": 0.2798, |
|
"num_tokens": 736686494.0, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.3176525384257103, |
|
"grad_norm": 0.26717219505855055, |
|
"learning_rate": 3.113565757680359e-05, |
|
"loss": 0.2963, |
|
"num_tokens": 739305242.0, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.3223102002794598, |
|
"grad_norm": 0.30166706406997973, |
|
"learning_rate": 3.1049361408353474e-05, |
|
"loss": 0.2911, |
|
"num_tokens": 741926682.0, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.326967862133209, |
|
"grad_norm": 0.23037762741268314, |
|
"learning_rate": 3.096306523990335e-05, |
|
"loss": 0.2688, |
|
"num_tokens": 744521095.0, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.3316255239869585, |
|
"grad_norm": 0.2513818523895221, |
|
"learning_rate": 3.0876769071453223e-05, |
|
"loss": 0.2822, |
|
"num_tokens": 747116111.0, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.336283185840708, |
|
"grad_norm": 0.25728555675981446, |
|
"learning_rate": 3.079047290300311e-05, |
|
"loss": 0.2833, |
|
"num_tokens": 749737551.0, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.3409408476944573, |
|
"grad_norm": 0.2564221870409054, |
|
"learning_rate": 3.070417673455299e-05, |
|
"loss": 0.2808, |
|
"num_tokens": 752358040.0, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.3455985095482068, |
|
"grad_norm": 0.254766793549684, |
|
"learning_rate": 3.061788056610287e-05, |
|
"loss": 0.285, |
|
"num_tokens": 754979480.0, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.350256171401956, |
|
"grad_norm": 0.24498900221791703, |
|
"learning_rate": 3.053158439765274e-05, |
|
"loss": 0.2754, |
|
"num_tokens": 757600920.0, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.3549138332557056, |
|
"grad_norm": 0.23493264294585314, |
|
"learning_rate": 3.0445288229202625e-05, |
|
"loss": 0.2814, |
|
"num_tokens": 760222360.0, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.359571495109455, |
|
"grad_norm": 0.2630386608386504, |
|
"learning_rate": 3.0358992060752506e-05, |
|
"loss": 0.2637, |
|
"num_tokens": 762843678.0, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.3642291569632046, |
|
"grad_norm": 0.2394316260345572, |
|
"learning_rate": 3.027269589230238e-05, |
|
"loss": 0.2796, |
|
"num_tokens": 765442631.0, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.3688868188169538, |
|
"grad_norm": 0.2390476917151645, |
|
"learning_rate": 3.018639972385226e-05, |
|
"loss": 0.2814, |
|
"num_tokens": 768049921.0, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.3735444806707033, |
|
"grad_norm": 0.2919140428129752, |
|
"learning_rate": 3.010010355540214e-05, |
|
"loss": 0.2772, |
|
"num_tokens": 770646930.0, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.3782021425244526, |
|
"grad_norm": 0.29385816246884944, |
|
"learning_rate": 3.001380738695202e-05, |
|
"loss": 0.2884, |
|
"num_tokens": 773240766.0, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.382859804378202, |
|
"grad_norm": 0.33487740297263696, |
|
"learning_rate": 2.99275112185019e-05, |
|
"loss": 0.2908, |
|
"num_tokens": 775862206.0, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.3875174662319516, |
|
"grad_norm": 0.2577851937226835, |
|
"learning_rate": 2.984121505005178e-05, |
|
"loss": 0.2867, |
|
"num_tokens": 778483646.0, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.392175128085701, |
|
"grad_norm": 0.27996717526951814, |
|
"learning_rate": 2.9754918881601657e-05, |
|
"loss": 0.2791, |
|
"num_tokens": 781105086.0, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.3968327899394504, |
|
"grad_norm": 0.25754838201668395, |
|
"learning_rate": 2.966862271315154e-05, |
|
"loss": 0.2686, |
|
"num_tokens": 783726526.0, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.4014904517931999, |
|
"grad_norm": 0.300697823170609, |
|
"learning_rate": 2.9582326544701417e-05, |
|
"loss": 0.2766, |
|
"num_tokens": 786325616.0, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.4061481136469491, |
|
"grad_norm": 0.35724755171638073, |
|
"learning_rate": 2.9496030376251292e-05, |
|
"loss": 0.2724, |
|
"num_tokens": 788947056.0, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.4108057755006986, |
|
"grad_norm": 0.24677399714229475, |
|
"learning_rate": 2.9409734207801177e-05, |
|
"loss": 0.2816, |
|
"num_tokens": 791538191.0, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.4154634373544481, |
|
"grad_norm": 0.26435033183990186, |
|
"learning_rate": 2.9323438039351052e-05, |
|
"loss": 0.2734, |
|
"num_tokens": 794159631.0, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.4201210992081974, |
|
"grad_norm": 0.32036106613904486, |
|
"learning_rate": 2.9237141870900937e-05, |
|
"loss": 0.2819, |
|
"num_tokens": 796781071.0, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.424778761061947, |
|
"grad_norm": 0.2475476137169364, |
|
"learning_rate": 2.9150845702450812e-05, |
|
"loss": 0.2688, |
|
"num_tokens": 799402511.0, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.4294364229156964, |
|
"grad_norm": 0.2528684630121611, |
|
"learning_rate": 2.906454953400069e-05, |
|
"loss": 0.2883, |
|
"num_tokens": 802023951.0, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.4340940847694457, |
|
"grad_norm": 0.22968741877065332, |
|
"learning_rate": 2.897825336555057e-05, |
|
"loss": 0.2746, |
|
"num_tokens": 804645391.0, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.4387517466231952, |
|
"grad_norm": 0.24342123000808089, |
|
"learning_rate": 2.889195719710045e-05, |
|
"loss": 0.271, |
|
"num_tokens": 807266831.0, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.4434094084769447, |
|
"grad_norm": 0.29535237660947017, |
|
"learning_rate": 2.8805661028650328e-05, |
|
"loss": 0.2692, |
|
"num_tokens": 809888271.0, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.448067070330694, |
|
"grad_norm": 0.275059474845138, |
|
"learning_rate": 2.871936486020021e-05, |
|
"loss": 0.2689, |
|
"num_tokens": 812509711.0, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.4527247321844434, |
|
"grad_norm": 0.26490502059184046, |
|
"learning_rate": 2.8633068691750088e-05, |
|
"loss": 0.28, |
|
"num_tokens": 815131151.0, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.4573823940381927, |
|
"grad_norm": 0.23510205220169592, |
|
"learning_rate": 2.854677252329997e-05, |
|
"loss": 0.2858, |
|
"num_tokens": 817730759.0, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.4620400558919422, |
|
"grad_norm": 0.3374158150586498, |
|
"learning_rate": 2.8460476354849848e-05, |
|
"loss": 0.2978, |
|
"num_tokens": 820352199.0, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.4666977177456917, |
|
"grad_norm": 0.26454799397527445, |
|
"learning_rate": 2.8374180186399723e-05, |
|
"loss": 0.2737, |
|
"num_tokens": 822973639.0, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.4713553795994412, |
|
"grad_norm": 0.26762807423610585, |
|
"learning_rate": 2.8287884017949608e-05, |
|
"loss": 0.2782, |
|
"num_tokens": 825595079.0, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.4760130414531905, |
|
"grad_norm": 0.24025946078804356, |
|
"learning_rate": 2.8201587849499482e-05, |
|
"loss": 0.2795, |
|
"num_tokens": 828216519.0, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.48067070330694, |
|
"grad_norm": 0.24596937742327113, |
|
"learning_rate": 2.811529168104936e-05, |
|
"loss": 0.2768, |
|
"num_tokens": 830837959.0, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.4853283651606892, |
|
"grad_norm": 0.2356611967272471, |
|
"learning_rate": 2.8028995512599242e-05, |
|
"loss": 0.2786, |
|
"num_tokens": 833459399.0, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.4899860270144387, |
|
"grad_norm": 0.24597105088414528, |
|
"learning_rate": 2.794269934414912e-05, |
|
"loss": 0.286, |
|
"num_tokens": 836080839.0, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.4946436888681882, |
|
"grad_norm": 0.2413954227984674, |
|
"learning_rate": 2.7856403175699002e-05, |
|
"loss": 0.2829, |
|
"num_tokens": 838689704.0, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.4993013507219377, |
|
"grad_norm": 0.25744774684417754, |
|
"learning_rate": 2.777010700724888e-05, |
|
"loss": 0.2793, |
|
"num_tokens": 841287483.0, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.503959012575687, |
|
"grad_norm": 0.2821271289814806, |
|
"learning_rate": 2.768381083879876e-05, |
|
"loss": 0.2776, |
|
"num_tokens": 843905126.0, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.5086166744294365, |
|
"grad_norm": 0.29639194566235333, |
|
"learning_rate": 2.759751467034864e-05, |
|
"loss": 0.2888, |
|
"num_tokens": 846525643.0, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.5132743362831858, |
|
"grad_norm": 0.24055196328513787, |
|
"learning_rate": 2.751121850189852e-05, |
|
"loss": 0.2859, |
|
"num_tokens": 849147083.0, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.5179319981369352, |
|
"grad_norm": 0.2624395571193606, |
|
"learning_rate": 2.7424922333448393e-05, |
|
"loss": 0.2817, |
|
"num_tokens": 851768523.0, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.5225896599906847, |
|
"grad_norm": 0.2843888138879249, |
|
"learning_rate": 2.7338626164998278e-05, |
|
"loss": 0.2868, |
|
"num_tokens": 854389963.0, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.5272473218444342, |
|
"grad_norm": 0.2509694150649848, |
|
"learning_rate": 2.7252329996548153e-05, |
|
"loss": 0.2818, |
|
"num_tokens": 857011403.0, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.5319049836981835, |
|
"grad_norm": 0.33386840811737556, |
|
"learning_rate": 2.7166033828098038e-05, |
|
"loss": 0.2837, |
|
"num_tokens": 859632843.0, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.5365626455519328, |
|
"grad_norm": 0.2519060001119771, |
|
"learning_rate": 2.7079737659647913e-05, |
|
"loss": 0.2688, |
|
"num_tokens": 862254283.0, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.5412203074056823, |
|
"grad_norm": 0.26036183172231986, |
|
"learning_rate": 2.699344149119779e-05, |
|
"loss": 0.2694, |
|
"num_tokens": 864875723.0, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.5458779692594318, |
|
"grad_norm": 0.39552084812808797, |
|
"learning_rate": 2.6907145322747673e-05, |
|
"loss": 0.273, |
|
"num_tokens": 867482835.0, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.5505356311131813, |
|
"grad_norm": 0.3280396515519808, |
|
"learning_rate": 2.682084915429755e-05, |
|
"loss": 0.2815, |
|
"num_tokens": 870104275.0, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.5551932929669308, |
|
"grad_norm": 0.24102481952881588, |
|
"learning_rate": 2.673455298584743e-05, |
|
"loss": 0.2819, |
|
"num_tokens": 872694928.0, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.55985095482068, |
|
"grad_norm": 0.2762279871868237, |
|
"learning_rate": 2.664825681739731e-05, |
|
"loss": 0.2774, |
|
"num_tokens": 875316368.0, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.5645086166744293, |
|
"grad_norm": 0.3001583042906396, |
|
"learning_rate": 2.6561960648947186e-05, |
|
"loss": 0.283, |
|
"num_tokens": 877937808.0, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.5691662785281788, |
|
"grad_norm": 0.275315893161812, |
|
"learning_rate": 2.647566448049707e-05, |
|
"loss": 0.2696, |
|
"num_tokens": 880553728.0, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.5738239403819283, |
|
"grad_norm": 0.28562133915152954, |
|
"learning_rate": 2.6389368312046945e-05, |
|
"loss": 0.2867, |
|
"num_tokens": 883175168.0, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.5784816022356778, |
|
"grad_norm": 0.2801444380047681, |
|
"learning_rate": 2.6303072143596824e-05, |
|
"loss": 0.278, |
|
"num_tokens": 885796608.0, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.583139264089427, |
|
"grad_norm": 0.2776860382331768, |
|
"learning_rate": 2.6216775975146705e-05, |
|
"loss": 0.2777, |
|
"num_tokens": 888410029.0, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.5877969259431766, |
|
"grad_norm": 0.2508178609607129, |
|
"learning_rate": 2.6130479806696584e-05, |
|
"loss": 0.2778, |
|
"num_tokens": 891031469.0, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.5924545877969258, |
|
"grad_norm": 0.24079007534321614, |
|
"learning_rate": 2.6044183638246462e-05, |
|
"loss": 0.2829, |
|
"num_tokens": 893652909.0, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.5971122496506753, |
|
"grad_norm": 0.24438421533644653, |
|
"learning_rate": 2.5957887469796343e-05, |
|
"loss": 0.2769, |
|
"num_tokens": 896242415.0, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.6017699115044248, |
|
"grad_norm": 0.22091161839105622, |
|
"learning_rate": 2.587159130134622e-05, |
|
"loss": 0.2657, |
|
"num_tokens": 898863855.0, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.6064275733581743, |
|
"grad_norm": 0.2611685768405385, |
|
"learning_rate": 2.5785295132896096e-05, |
|
"loss": 0.2823, |
|
"num_tokens": 901485295.0, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.6110852352119236, |
|
"grad_norm": 0.31023224431225427, |
|
"learning_rate": 2.569899896444598e-05, |
|
"loss": 0.2955, |
|
"num_tokens": 904106735.0, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.6157428970656729, |
|
"grad_norm": 0.24266065185424698, |
|
"learning_rate": 2.5612702795995856e-05, |
|
"loss": 0.2892, |
|
"num_tokens": 906728175.0, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.6204005589194224, |
|
"grad_norm": 0.22481340786142293, |
|
"learning_rate": 2.552640662754574e-05, |
|
"loss": 0.2724, |
|
"num_tokens": 909349615.0, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.6250582207731719, |
|
"grad_norm": 0.24602537155724646, |
|
"learning_rate": 2.5440110459095616e-05, |
|
"loss": 0.3032, |
|
"num_tokens": 911971055.0, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.6297158826269214, |
|
"grad_norm": 0.25533740298732005, |
|
"learning_rate": 2.5353814290645494e-05, |
|
"loss": 0.2896, |
|
"num_tokens": 914580843.0, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.6343735444806708, |
|
"grad_norm": 0.2520278922175136, |
|
"learning_rate": 2.5267518122195376e-05, |
|
"loss": 0.2775, |
|
"num_tokens": 917202283.0, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.6390312063344201, |
|
"grad_norm": 0.6074818493123854, |
|
"learning_rate": 2.5181221953745254e-05, |
|
"loss": 0.2739, |
|
"num_tokens": 919823723.0, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.6436888681881694, |
|
"grad_norm": 0.23875498925866181, |
|
"learning_rate": 2.5094925785295132e-05, |
|
"loss": 0.2622, |
|
"num_tokens": 922445163.0, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.648346530041919, |
|
"grad_norm": 0.25996583848456867, |
|
"learning_rate": 2.5008629616845014e-05, |
|
"loss": 0.2766, |
|
"num_tokens": 925060687.0, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.6530041918956684, |
|
"grad_norm": 0.6741356509956318, |
|
"learning_rate": 2.4922333448394892e-05, |
|
"loss": 0.2904, |
|
"num_tokens": 927682127.0, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.6576618537494179, |
|
"grad_norm": 0.2649759220570625, |
|
"learning_rate": 2.483603727994477e-05, |
|
"loss": 0.2706, |
|
"num_tokens": 930303567.0, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.6623195156031674, |
|
"grad_norm": 0.24250480686172254, |
|
"learning_rate": 2.4749741111494652e-05, |
|
"loss": 0.2808, |
|
"num_tokens": 932925007.0, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.6669771774569166, |
|
"grad_norm": 0.2641363849028102, |
|
"learning_rate": 2.466344494304453e-05, |
|
"loss": 0.2826, |
|
"num_tokens": 935546447.0, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.671634839310666, |
|
"grad_norm": 0.25909638449622047, |
|
"learning_rate": 2.457714877459441e-05, |
|
"loss": 0.2705, |
|
"num_tokens": 938167887.0, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.6762925011644154, |
|
"grad_norm": 0.22021948108349393, |
|
"learning_rate": 2.4490852606144287e-05, |
|
"loss": 0.2741, |
|
"num_tokens": 940789327.0, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.680950163018165, |
|
"grad_norm": 0.25132553299495947, |
|
"learning_rate": 2.440455643769417e-05, |
|
"loss": 0.2881, |
|
"num_tokens": 943396878.0, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.6856078248719144, |
|
"grad_norm": 0.25101345532688984, |
|
"learning_rate": 2.4318260269244047e-05, |
|
"loss": 0.2785, |
|
"num_tokens": 946011477.0, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.6902654867256637, |
|
"grad_norm": 0.2759051677371723, |
|
"learning_rate": 2.4231964100793925e-05, |
|
"loss": 0.2754, |
|
"num_tokens": 948632917.0, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.6949231485794132, |
|
"grad_norm": 0.23143291762750065, |
|
"learning_rate": 2.4145667932343803e-05, |
|
"loss": 0.2663, |
|
"num_tokens": 951254357.0, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.6995808104331624, |
|
"grad_norm": 0.25363871829726614, |
|
"learning_rate": 2.4059371763893685e-05, |
|
"loss": 0.268, |
|
"num_tokens": 953875797.0, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.704238472286912, |
|
"grad_norm": 0.23655353808018872, |
|
"learning_rate": 2.3973075595443563e-05, |
|
"loss": 0.2751, |
|
"num_tokens": 956497237.0, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.7088961341406614, |
|
"grad_norm": 0.23922139243346943, |
|
"learning_rate": 2.388677942699344e-05, |
|
"loss": 0.2817, |
|
"num_tokens": 959085993.0, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.713553795994411, |
|
"grad_norm": 0.2602832471502447, |
|
"learning_rate": 2.3800483258543323e-05, |
|
"loss": 0.2713, |
|
"num_tokens": 961707433.0, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.7182114578481602, |
|
"grad_norm": 0.25485339870064433, |
|
"learning_rate": 2.37141870900932e-05, |
|
"loss": 0.2786, |
|
"num_tokens": 964320963.0, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7228691197019095, |
|
"grad_norm": 0.2899423992341778, |
|
"learning_rate": 2.3627890921643083e-05, |
|
"loss": 0.2919, |
|
"num_tokens": 966942403.0, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.727526781555659, |
|
"grad_norm": 0.2745650217168605, |
|
"learning_rate": 2.3541594753192957e-05, |
|
"loss": 0.2788, |
|
"num_tokens": 969563843.0, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.7321844434094085, |
|
"grad_norm": 0.329326878812573, |
|
"learning_rate": 2.345529858474284e-05, |
|
"loss": 0.2909, |
|
"num_tokens": 972185283.0, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.736842105263158, |
|
"grad_norm": 0.2521861694240938, |
|
"learning_rate": 2.3369002416292717e-05, |
|
"loss": 0.2684, |
|
"num_tokens": 974806723.0, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.7414997671169075, |
|
"grad_norm": 0.21998196672449688, |
|
"learning_rate": 2.32827062478426e-05, |
|
"loss": 0.2706, |
|
"num_tokens": 977428163.0, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.7461574289706567, |
|
"grad_norm": 0.2622969918570912, |
|
"learning_rate": 2.3196410079392474e-05, |
|
"loss": 0.2879, |
|
"num_tokens": 980049603.0, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.750815090824406, |
|
"grad_norm": 0.3115008184293068, |
|
"learning_rate": 2.3110113910942355e-05, |
|
"loss": 0.2854, |
|
"num_tokens": 982671043.0, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7554727526781555, |
|
"grad_norm": 0.2868213227438428, |
|
"learning_rate": 2.3023817742492234e-05, |
|
"loss": 0.2814, |
|
"num_tokens": 985291047.0, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.760130414531905, |
|
"grad_norm": 0.2495004959502272, |
|
"learning_rate": 2.2937521574042115e-05, |
|
"loss": 0.2837, |
|
"num_tokens": 987883669.0, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.7647880763856545, |
|
"grad_norm": 0.24810813168345067, |
|
"learning_rate": 2.285122540559199e-05, |
|
"loss": 0.2885, |
|
"num_tokens": 990481951.0, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.7694457382394038, |
|
"grad_norm": 0.25530644222386045, |
|
"learning_rate": 2.276492923714187e-05, |
|
"loss": 0.2792, |
|
"num_tokens": 993103391.0, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.7741034000931533, |
|
"grad_norm": 0.23819977270988402, |
|
"learning_rate": 2.267863306869175e-05, |
|
"loss": 0.2844, |
|
"num_tokens": 995694102.0, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7787610619469025, |
|
"grad_norm": 0.2266657031311718, |
|
"learning_rate": 2.259233690024163e-05, |
|
"loss": 0.2674, |
|
"num_tokens": 998315542.0, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.783418723800652, |
|
"grad_norm": 0.22723406455775144, |
|
"learning_rate": 2.250604073179151e-05, |
|
"loss": 0.2738, |
|
"num_tokens": 1000936982.0, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7880763856544015, |
|
"grad_norm": 0.2603443049602763, |
|
"learning_rate": 2.2419744563341388e-05, |
|
"loss": 0.2775, |
|
"num_tokens": 1003558422.0, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.792734047508151, |
|
"grad_norm": 0.29402024633598206, |
|
"learning_rate": 2.233344839489127e-05, |
|
"loss": 0.2885, |
|
"num_tokens": 1006149327.0, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.7973917093619003, |
|
"grad_norm": 0.2440028336911219, |
|
"learning_rate": 2.2247152226441148e-05, |
|
"loss": 0.2675, |
|
"num_tokens": 1008761876.0, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8020493712156498, |
|
"grad_norm": 0.23990807035978515, |
|
"learning_rate": 2.2160856057991026e-05, |
|
"loss": 0.2697, |
|
"num_tokens": 1011383316.0, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.806707033069399, |
|
"grad_norm": 0.22574582637141183, |
|
"learning_rate": 2.2074559889540904e-05, |
|
"loss": 0.276, |
|
"num_tokens": 1014004756.0, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8113646949231486, |
|
"grad_norm": 0.2806755751951861, |
|
"learning_rate": 2.1988263721090786e-05, |
|
"loss": 0.2801, |
|
"num_tokens": 1016604716.0, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.816022356776898, |
|
"grad_norm": 0.22478515202508018, |
|
"learning_rate": 2.1901967552640664e-05, |
|
"loss": 0.2678, |
|
"num_tokens": 1019224670.0, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8206800186306475, |
|
"grad_norm": 0.23339158485383402, |
|
"learning_rate": 2.1815671384190542e-05, |
|
"loss": 0.2739, |
|
"num_tokens": 1021846110.0, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8253376804843968, |
|
"grad_norm": 0.2447423723444364, |
|
"learning_rate": 2.172937521574042e-05, |
|
"loss": 0.2777, |
|
"num_tokens": 1024467550.0, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.829995342338146, |
|
"grad_norm": 0.322101332959102, |
|
"learning_rate": 2.1643079047290302e-05, |
|
"loss": 0.2743, |
|
"num_tokens": 1027061863.0, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.8346530041918956, |
|
"grad_norm": 0.2950678541012179, |
|
"learning_rate": 2.155678287884018e-05, |
|
"loss": 0.2643, |
|
"num_tokens": 1029683303.0, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.839310666045645, |
|
"grad_norm": 0.27257343724970284, |
|
"learning_rate": 2.147048671039006e-05, |
|
"loss": 0.2813, |
|
"num_tokens": 1032304743.0, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.8439683278993946, |
|
"grad_norm": 0.23708782005593013, |
|
"learning_rate": 2.1384190541939937e-05, |
|
"loss": 0.2696, |
|
"num_tokens": 1034926183.0, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.848625989753144, |
|
"grad_norm": 0.23810036151619607, |
|
"learning_rate": 2.129789437348982e-05, |
|
"loss": 0.2752, |
|
"num_tokens": 1037538252.0, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.8532836516068933, |
|
"grad_norm": 0.22131956602133956, |
|
"learning_rate": 2.1211598205039697e-05, |
|
"loss": 0.2816, |
|
"num_tokens": 1040159692.0, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.8579413134606426, |
|
"grad_norm": 0.26221654380992987, |
|
"learning_rate": 2.1125302036589575e-05, |
|
"loss": 0.276, |
|
"num_tokens": 1042781132.0, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8625989753143921, |
|
"grad_norm": 0.2763721434630872, |
|
"learning_rate": 2.1039005868139457e-05, |
|
"loss": 0.2743, |
|
"num_tokens": 1045402572.0, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.8672566371681416, |
|
"grad_norm": 0.2782332025416994, |
|
"learning_rate": 2.0952709699689335e-05, |
|
"loss": 0.2823, |
|
"num_tokens": 1048018446.0, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.871914299021891, |
|
"grad_norm": 0.23803614005307103, |
|
"learning_rate": 2.0866413531239216e-05, |
|
"loss": 0.2686, |
|
"num_tokens": 1050639886.0, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.8765719608756404, |
|
"grad_norm": 0.22674206087841164, |
|
"learning_rate": 2.078011736278909e-05, |
|
"loss": 0.2746, |
|
"num_tokens": 1053261326.0, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8812296227293899, |
|
"grad_norm": 0.22725091660452684, |
|
"learning_rate": 2.0693821194338973e-05, |
|
"loss": 0.2735, |
|
"num_tokens": 1055871697.0, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8858872845831391, |
|
"grad_norm": 0.24433878595752298, |
|
"learning_rate": 2.060752502588885e-05, |
|
"loss": 0.2815, |
|
"num_tokens": 1058493137.0, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8905449464368886, |
|
"grad_norm": 0.2666883415037318, |
|
"learning_rate": 2.0521228857438733e-05, |
|
"loss": 0.2753, |
|
"num_tokens": 1061114577.0, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.8952026082906381, |
|
"grad_norm": 0.2569040671161156, |
|
"learning_rate": 2.0434932688988608e-05, |
|
"loss": 0.2721, |
|
"num_tokens": 1063736017.0, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.8998602701443876, |
|
"grad_norm": 0.25149835872795157, |
|
"learning_rate": 2.034863652053849e-05, |
|
"loss": 0.2787, |
|
"num_tokens": 1066357457.0, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.904517931998137, |
|
"grad_norm": 0.2361648890464829, |
|
"learning_rate": 2.0262340352088367e-05, |
|
"loss": 0.277, |
|
"num_tokens": 1068978897.0, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9091755938518864, |
|
"grad_norm": 0.25758614769547705, |
|
"learning_rate": 2.017604418363825e-05, |
|
"loss": 0.2832, |
|
"num_tokens": 1071600337.0, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9138332557056357, |
|
"grad_norm": 0.23652596339114235, |
|
"learning_rate": 2.0089748015188127e-05, |
|
"loss": 0.2886, |
|
"num_tokens": 1074208623.0, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9184909175593852, |
|
"grad_norm": 0.2578336365580039, |
|
"learning_rate": 2.0003451846738005e-05, |
|
"loss": 0.2751, |
|
"num_tokens": 1076785605.0, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.9231485794131347, |
|
"grad_norm": 0.23243843826265423, |
|
"learning_rate": 1.9917155678287887e-05, |
|
"loss": 0.2645, |
|
"num_tokens": 1079407045.0, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.9278062412668842, |
|
"grad_norm": 0.25865227066679375, |
|
"learning_rate": 1.9830859509837765e-05, |
|
"loss": 0.2692, |
|
"num_tokens": 1082028485.0, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9324639031206334, |
|
"grad_norm": 0.2347806044881937, |
|
"learning_rate": 1.9744563341387643e-05, |
|
"loss": 0.2747, |
|
"num_tokens": 1084649925.0, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.9371215649743827, |
|
"grad_norm": 0.27455209060598385, |
|
"learning_rate": 1.9658267172937522e-05, |
|
"loss": 0.2751, |
|
"num_tokens": 1087271365.0, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.9417792268281322, |
|
"grad_norm": 0.22749532966422994, |
|
"learning_rate": 1.9571971004487403e-05, |
|
"loss": 0.2833, |
|
"num_tokens": 1089860165.0, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.9464368886818817, |
|
"grad_norm": 0.25182841487010565, |
|
"learning_rate": 1.948567483603728e-05, |
|
"loss": 0.2674, |
|
"num_tokens": 1092481605.0, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.9510945505356312, |
|
"grad_norm": 0.2315781981216672, |
|
"learning_rate": 1.939937866758716e-05, |
|
"loss": 0.2698, |
|
"num_tokens": 1095082526.0, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.9557522123893807, |
|
"grad_norm": 0.23764701466290206, |
|
"learning_rate": 1.9313082499137038e-05, |
|
"loss": 0.2771, |
|
"num_tokens": 1097703966.0, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.96040987424313, |
|
"grad_norm": 0.24584826617890893, |
|
"learning_rate": 1.922678633068692e-05, |
|
"loss": 0.286, |
|
"num_tokens": 1100325406.0, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.9650675360968792, |
|
"grad_norm": 0.22948941314974505, |
|
"learning_rate": 1.9140490162236798e-05, |
|
"loss": 0.2714, |
|
"num_tokens": 1102946846.0, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9697251979506287, |
|
"grad_norm": 0.24532426266235913, |
|
"learning_rate": 1.9054193993786676e-05, |
|
"loss": 0.2742, |
|
"num_tokens": 1105554920.0, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9743828598043782, |
|
"grad_norm": 0.23211658563196494, |
|
"learning_rate": 1.8967897825336554e-05, |
|
"loss": 0.275, |
|
"num_tokens": 1108176360.0, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.9790405216581277, |
|
"grad_norm": 0.22106706200612478, |
|
"learning_rate": 1.8881601656886436e-05, |
|
"loss": 0.2783, |
|
"num_tokens": 1110768880.0, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.983698183511877, |
|
"grad_norm": 0.24816638342159197, |
|
"learning_rate": 1.8795305488436314e-05, |
|
"loss": 0.2769, |
|
"num_tokens": 1113390320.0, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9883558453656265, |
|
"grad_norm": 0.26279439526160714, |
|
"learning_rate": 1.8709009319986192e-05, |
|
"loss": 0.279, |
|
"num_tokens": 1116011760.0, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.9930135072193758, |
|
"grad_norm": 0.22476550281934327, |
|
"learning_rate": 1.8622713151536074e-05, |
|
"loss": 0.2755, |
|
"num_tokens": 1118633200.0, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9976711690731253, |
|
"grad_norm": 0.22321618125402085, |
|
"learning_rate": 1.8536416983085952e-05, |
|
"loss": 0.2841, |
|
"num_tokens": 1121254640.0, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.0018630647415, |
|
"grad_norm": 0.22811789448737538, |
|
"learning_rate": 1.8450120814635834e-05, |
|
"loss": 0.2459, |
|
"num_tokens": 1123444248.0, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.0065207265952494, |
|
"grad_norm": 0.23763390471988635, |
|
"learning_rate": 1.836382464618571e-05, |
|
"loss": 0.2109, |
|
"num_tokens": 1126056407.0, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 2.0111783884489984, |
|
"grad_norm": 0.2159028160161467, |
|
"learning_rate": 1.827752847773559e-05, |
|
"loss": 0.2033, |
|
"num_tokens": 1128677847.0, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.015836050302748, |
|
"grad_norm": 0.2354086898913808, |
|
"learning_rate": 1.819123230928547e-05, |
|
"loss": 0.1927, |
|
"num_tokens": 1131289728.0, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 2.0204937121564974, |
|
"grad_norm": 0.22993245593936573, |
|
"learning_rate": 1.810493614083535e-05, |
|
"loss": 0.1869, |
|
"num_tokens": 1133911168.0, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.025151374010247, |
|
"grad_norm": 0.2788929387492099, |
|
"learning_rate": 1.8018639972385225e-05, |
|
"loss": 0.1958, |
|
"num_tokens": 1136511128.0, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.0298090358639964, |
|
"grad_norm": 0.28967455780722556, |
|
"learning_rate": 1.7932343803935107e-05, |
|
"loss": 0.1805, |
|
"num_tokens": 1139132568.0, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.034466697717746, |
|
"grad_norm": 0.24813904774828296, |
|
"learning_rate": 1.7846047635484985e-05, |
|
"loss": 0.1903, |
|
"num_tokens": 1141754008.0, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.039124359571495, |
|
"grad_norm": 0.26291754418443875, |
|
"learning_rate": 1.7759751467034866e-05, |
|
"loss": 0.2017, |
|
"num_tokens": 1144364017.0, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.0437820214252445, |
|
"grad_norm": 0.21454863669215662, |
|
"learning_rate": 1.767345529858474e-05, |
|
"loss": 0.1843, |
|
"num_tokens": 1146985457.0, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.048439683278994, |
|
"grad_norm": 0.21582532066367777, |
|
"learning_rate": 1.7587159130134623e-05, |
|
"loss": 0.1804, |
|
"num_tokens": 1149576149.0, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.0530973451327434, |
|
"grad_norm": 0.21932933980572245, |
|
"learning_rate": 1.75008629616845e-05, |
|
"loss": 0.1963, |
|
"num_tokens": 1152197589.0, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.057755006986493, |
|
"grad_norm": 0.2151303685730624, |
|
"learning_rate": 1.7414566793234383e-05, |
|
"loss": 0.1871, |
|
"num_tokens": 1154799708.0, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.062412668840242, |
|
"grad_norm": 0.2222160470869714, |
|
"learning_rate": 1.732827062478426e-05, |
|
"loss": 0.1991, |
|
"num_tokens": 1157421148.0, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.0670703306939915, |
|
"grad_norm": 0.3443027048047173, |
|
"learning_rate": 1.724197445633414e-05, |
|
"loss": 0.1861, |
|
"num_tokens": 1160042588.0, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.071727992547741, |
|
"grad_norm": 0.26261630346021714, |
|
"learning_rate": 1.715567828788402e-05, |
|
"loss": 0.1853, |
|
"num_tokens": 1162664028.0, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.0763856544014905, |
|
"grad_norm": 0.19977835774642955, |
|
"learning_rate": 1.70693821194339e-05, |
|
"loss": 0.1959, |
|
"num_tokens": 1165285468.0, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.08104331625524, |
|
"grad_norm": 0.21770826209782151, |
|
"learning_rate": 1.6983085950983777e-05, |
|
"loss": 0.1851, |
|
"num_tokens": 1167906908.0, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.0857009781089895, |
|
"grad_norm": 0.208891787309407, |
|
"learning_rate": 1.6896789782533655e-05, |
|
"loss": 0.1925, |
|
"num_tokens": 1170528348.0, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.0903586399627385, |
|
"grad_norm": 0.2131965536139105, |
|
"learning_rate": 1.6810493614083537e-05, |
|
"loss": 0.1899, |
|
"num_tokens": 1173119003.0, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.095016301816488, |
|
"grad_norm": 0.2036260100247772, |
|
"learning_rate": 1.6724197445633415e-05, |
|
"loss": 0.1785, |
|
"num_tokens": 1175740443.0, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.0996739636702375, |
|
"grad_norm": 0.29436739494405906, |
|
"learning_rate": 1.6637901277183294e-05, |
|
"loss": 0.1959, |
|
"num_tokens": 1178339533.0, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.104331625523987, |
|
"grad_norm": 0.22398528196888434, |
|
"learning_rate": 1.6551605108733172e-05, |
|
"loss": 0.1844, |
|
"num_tokens": 1180960973.0, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.1089892873777365, |
|
"grad_norm": 0.4929934728417884, |
|
"learning_rate": 1.6465308940283053e-05, |
|
"loss": 0.2028, |
|
"num_tokens": 1183560581.0, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.113646949231486, |
|
"grad_norm": 0.23022866493235755, |
|
"learning_rate": 1.637901277183293e-05, |
|
"loss": 0.1898, |
|
"num_tokens": 1186182021.0, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.118304611085235, |
|
"grad_norm": 0.22441316024216518, |
|
"learning_rate": 1.629271660338281e-05, |
|
"loss": 0.188, |
|
"num_tokens": 1188803461.0, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.1229622729389845, |
|
"grad_norm": 0.23904511220914146, |
|
"learning_rate": 1.620642043493269e-05, |
|
"loss": 0.1869, |
|
"num_tokens": 1191413832.0, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.127619934792734, |
|
"grad_norm": 0.20446264923991703, |
|
"learning_rate": 1.612012426648257e-05, |
|
"loss": 0.1872, |
|
"num_tokens": 1194013826.0, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.1322775966464835, |
|
"grad_norm": 0.23153824701671225, |
|
"learning_rate": 1.6033828098032448e-05, |
|
"loss": 0.1946, |
|
"num_tokens": 1196635266.0, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.136935258500233, |
|
"grad_norm": 0.2227822286301486, |
|
"learning_rate": 1.5947531929582326e-05, |
|
"loss": 0.1908, |
|
"num_tokens": 1199233871.0, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.1415929203539825, |
|
"grad_norm": 0.2482664314610804, |
|
"learning_rate": 1.5861235761132208e-05, |
|
"loss": 0.1982, |
|
"num_tokens": 1201855311.0, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.1462505822077316, |
|
"grad_norm": 0.21568483935394447, |
|
"learning_rate": 1.5774939592682086e-05, |
|
"loss": 0.1948, |
|
"num_tokens": 1204475607.0, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.150908244061481, |
|
"grad_norm": 0.20282058165536931, |
|
"learning_rate": 1.5688643424231964e-05, |
|
"loss": 0.1828, |
|
"num_tokens": 1207097047.0, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.1555659059152306, |
|
"grad_norm": 0.6030692140488413, |
|
"learning_rate": 1.5602347255781842e-05, |
|
"loss": 0.1926, |
|
"num_tokens": 1209685847.0, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.16022356776898, |
|
"grad_norm": 0.2045510595258148, |
|
"learning_rate": 1.5516051087331724e-05, |
|
"loss": 0.1852, |
|
"num_tokens": 1212307287.0, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.1648812296227296, |
|
"grad_norm": 0.2229396176932403, |
|
"learning_rate": 1.5429754918881602e-05, |
|
"loss": 0.1929, |
|
"num_tokens": 1214920560.0, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.1695388914764786, |
|
"grad_norm": 0.22741654294869426, |
|
"learning_rate": 1.534345875043148e-05, |
|
"loss": 0.1841, |
|
"num_tokens": 1217520276.0, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.174196553330228, |
|
"grad_norm": 0.2218804396657255, |
|
"learning_rate": 1.525716258198136e-05, |
|
"loss": 0.1978, |
|
"num_tokens": 1220141716.0, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.1788542151839776, |
|
"grad_norm": 0.22397360159092755, |
|
"learning_rate": 1.517086641353124e-05, |
|
"loss": 0.1877, |
|
"num_tokens": 1222742637.0, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.183511877037727, |
|
"grad_norm": 0.22903346357382584, |
|
"learning_rate": 1.508457024508112e-05, |
|
"loss": 0.1926, |
|
"num_tokens": 1225364077.0, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.1881695388914766, |
|
"grad_norm": 0.22681455304615486, |
|
"learning_rate": 1.4998274076630997e-05, |
|
"loss": 0.1927, |
|
"num_tokens": 1227985517.0, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.192827200745226, |
|
"grad_norm": 0.21157141925014325, |
|
"learning_rate": 1.4911977908180877e-05, |
|
"loss": 0.1851, |
|
"num_tokens": 1230593185.0, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.197484862598975, |
|
"grad_norm": 0.24916960595742724, |
|
"learning_rate": 1.4825681739730757e-05, |
|
"loss": 0.1935, |
|
"num_tokens": 1233214625.0, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.2021425244527246, |
|
"grad_norm": 0.25376290664329243, |
|
"learning_rate": 1.4739385571280637e-05, |
|
"loss": 0.2031, |
|
"num_tokens": 1235780896.0, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.206800186306474, |
|
"grad_norm": 0.20737450540956834, |
|
"learning_rate": 1.4653089402830513e-05, |
|
"loss": 0.1909, |
|
"num_tokens": 1238372031.0, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.2114578481602236, |
|
"grad_norm": 0.22617271594955698, |
|
"learning_rate": 1.4566793234380393e-05, |
|
"loss": 0.1863, |
|
"num_tokens": 1240993471.0, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.216115510013973, |
|
"grad_norm": 0.27615735313408574, |
|
"learning_rate": 1.4480497065930273e-05, |
|
"loss": 0.1945, |
|
"num_tokens": 1243606026.0, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.2207731718677226, |
|
"grad_norm": 0.2859249546685766, |
|
"learning_rate": 1.4394200897480153e-05, |
|
"loss": 0.2029, |
|
"num_tokens": 1246227466.0, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.2254308337214717, |
|
"grad_norm": 0.19939388937682523, |
|
"learning_rate": 1.4307904729030031e-05, |
|
"loss": 0.2021, |
|
"num_tokens": 1248828187.0, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.230088495575221, |
|
"grad_norm": 0.21160041897399634, |
|
"learning_rate": 1.4221608560579911e-05, |
|
"loss": 0.1868, |
|
"num_tokens": 1251449627.0, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.2347461574289706, |
|
"grad_norm": 0.22235345505947743, |
|
"learning_rate": 1.4135312392129791e-05, |
|
"loss": 0.1839, |
|
"num_tokens": 1254056658.0, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.23940381928272, |
|
"grad_norm": 0.23712117694873538, |
|
"learning_rate": 1.404901622367967e-05, |
|
"loss": 0.1911, |
|
"num_tokens": 1256678098.0, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.2440614811364696, |
|
"grad_norm": 0.24561430494734446, |
|
"learning_rate": 1.3962720055229547e-05, |
|
"loss": 0.1897, |
|
"num_tokens": 1259299538.0, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.248719142990219, |
|
"grad_norm": 0.20679953059186723, |
|
"learning_rate": 1.3876423886779427e-05, |
|
"loss": 0.1905, |
|
"num_tokens": 1261907824.0, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.253376804843968, |
|
"grad_norm": 0.22679903703760337, |
|
"learning_rate": 1.3790127718329307e-05, |
|
"loss": 0.1904, |
|
"num_tokens": 1264522423.0, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.2580344666977177, |
|
"grad_norm": 0.21852291008494948, |
|
"learning_rate": 1.3703831549879187e-05, |
|
"loss": 0.184, |
|
"num_tokens": 1267143863.0, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.262692128551467, |
|
"grad_norm": 0.20205416517381505, |
|
"learning_rate": 1.3617535381429064e-05, |
|
"loss": 0.1775, |
|
"num_tokens": 1269765303.0, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.2673497904052167, |
|
"grad_norm": 0.21402822625968712, |
|
"learning_rate": 1.3531239212978944e-05, |
|
"loss": 0.1926, |
|
"num_tokens": 1272386743.0, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.272007452258966, |
|
"grad_norm": 0.21251576960373053, |
|
"learning_rate": 1.3444943044528824e-05, |
|
"loss": 0.1894, |
|
"num_tokens": 1274982459.0, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.276665114112715, |
|
"grad_norm": 0.19438570118120085, |
|
"learning_rate": 1.3358646876078703e-05, |
|
"loss": 0.1864, |
|
"num_tokens": 1277585298.0, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.2813227759664647, |
|
"grad_norm": 0.1969520337468591, |
|
"learning_rate": 1.327235070762858e-05, |
|
"loss": 0.1834, |
|
"num_tokens": 1280205815.0, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.285980437820214, |
|
"grad_norm": 0.19957634159837317, |
|
"learning_rate": 1.318605453917846e-05, |
|
"loss": 0.1919, |
|
"num_tokens": 1282802119.0, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.2906380996739637, |
|
"grad_norm": 0.23104800649747617, |
|
"learning_rate": 1.309975837072834e-05, |
|
"loss": 0.1924, |
|
"num_tokens": 1285414071.0, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.295295761527713, |
|
"grad_norm": 0.24111804075309679, |
|
"learning_rate": 1.301346220227822e-05, |
|
"loss": 0.1987, |
|
"num_tokens": 1288023894.0, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.2999534233814627, |
|
"grad_norm": 0.21793206443287055, |
|
"learning_rate": 1.2927166033828098e-05, |
|
"loss": 0.1872, |
|
"num_tokens": 1290645334.0, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.3046110852352117, |
|
"grad_norm": 0.2114943769815432, |
|
"learning_rate": 1.2840869865377978e-05, |
|
"loss": 0.1974, |
|
"num_tokens": 1293258755.0, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.3092687470889612, |
|
"grad_norm": 0.2091634629424366, |
|
"learning_rate": 1.2754573696927858e-05, |
|
"loss": 0.1832, |
|
"num_tokens": 1295880195.0, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.3139264089427107, |
|
"grad_norm": 0.23428714293181593, |
|
"learning_rate": 1.2668277528477738e-05, |
|
"loss": 0.1894, |
|
"num_tokens": 1298501635.0, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.3185840707964602, |
|
"grad_norm": 0.2160517380096932, |
|
"learning_rate": 1.2581981360027614e-05, |
|
"loss": 0.1999, |
|
"num_tokens": 1301121639.0, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.3232417326502097, |
|
"grad_norm": 0.2137407041544956, |
|
"learning_rate": 1.2495685191577494e-05, |
|
"loss": 0.1899, |
|
"num_tokens": 1303743079.0, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.3278993945039588, |
|
"grad_norm": 0.19309461413453677, |
|
"learning_rate": 1.2409389023127374e-05, |
|
"loss": 0.1802, |
|
"num_tokens": 1306327934.0, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.3325570563577083, |
|
"grad_norm": 0.21891242654845594, |
|
"learning_rate": 1.2323092854677252e-05, |
|
"loss": 0.1919, |
|
"num_tokens": 1308949374.0, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.3372147182114578, |
|
"grad_norm": 0.23605025676718958, |
|
"learning_rate": 1.2236796686227132e-05, |
|
"loss": 0.1905, |
|
"num_tokens": 1311570814.0, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.3418723800652073, |
|
"grad_norm": 0.21455241286434934, |
|
"learning_rate": 1.215050051777701e-05, |
|
"loss": 0.1932, |
|
"num_tokens": 1314192254.0, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.3465300419189568, |
|
"grad_norm": 0.2320284991032439, |
|
"learning_rate": 1.206420434932689e-05, |
|
"loss": 0.1854, |
|
"num_tokens": 1316784876.0, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.3511877037727063, |
|
"grad_norm": 0.25293497517809127, |
|
"learning_rate": 1.1977908180876769e-05, |
|
"loss": 0.1918, |
|
"num_tokens": 1319406316.0, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.3558453656264557, |
|
"grad_norm": 0.2146459615030723, |
|
"learning_rate": 1.1891612012426649e-05, |
|
"loss": 0.1907, |
|
"num_tokens": 1322027756.0, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.360503027480205, |
|
"grad_norm": 0.21794216655250306, |
|
"learning_rate": 1.1805315843976528e-05, |
|
"loss": 0.1891, |
|
"num_tokens": 1324649196.0, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.3651606893339543, |
|
"grad_norm": 0.21526928636835807, |
|
"learning_rate": 1.1719019675526408e-05, |
|
"loss": 0.1816, |
|
"num_tokens": 1327270636.0, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.369818351187704, |
|
"grad_norm": 0.2009434671686828, |
|
"learning_rate": 1.1632723507076287e-05, |
|
"loss": 0.1911, |
|
"num_tokens": 1329892076.0, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.3744760130414533, |
|
"grad_norm": 0.21308857609787304, |
|
"learning_rate": 1.1546427338626167e-05, |
|
"loss": 0.1873, |
|
"num_tokens": 1332513516.0, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.3791336748952028, |
|
"grad_norm": 0.20723679300713657, |
|
"learning_rate": 1.1460131170176045e-05, |
|
"loss": 0.1869, |
|
"num_tokens": 1335134956.0, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.383791336748952, |
|
"grad_norm": 0.222911629634698, |
|
"learning_rate": 1.1373835001725925e-05, |
|
"loss": 0.1934, |
|
"num_tokens": 1337756396.0, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.3884489986027013, |
|
"grad_norm": 0.2147394848596357, |
|
"learning_rate": 1.1287538833275803e-05, |
|
"loss": 0.1881, |
|
"num_tokens": 1340374946.0, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.393106660456451, |
|
"grad_norm": 0.22327545683469197, |
|
"learning_rate": 1.1201242664825683e-05, |
|
"loss": 0.2006, |
|
"num_tokens": 1342996386.0, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.3977643223102003, |
|
"grad_norm": 0.2034574667765081, |
|
"learning_rate": 1.1114946496375561e-05, |
|
"loss": 0.1951, |
|
"num_tokens": 1345617826.0, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.40242198416395, |
|
"grad_norm": 0.2113918902606263, |
|
"learning_rate": 1.1028650327925441e-05, |
|
"loss": 0.1909, |
|
"num_tokens": 1348239266.0, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.4070796460176993, |
|
"grad_norm": 0.22419572087626496, |
|
"learning_rate": 1.094235415947532e-05, |
|
"loss": 0.1866, |
|
"num_tokens": 1350860706.0, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.4117373078714484, |
|
"grad_norm": 0.19006705350069172, |
|
"learning_rate": 1.0856057991025199e-05, |
|
"loss": 0.1763, |
|
"num_tokens": 1353482146.0, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.416394969725198, |
|
"grad_norm": 0.24584362708804106, |
|
"learning_rate": 1.0769761822575077e-05, |
|
"loss": 0.2, |
|
"num_tokens": 1356044035.0, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.4210526315789473, |
|
"grad_norm": 0.21775943305708015, |
|
"learning_rate": 1.0683465654124957e-05, |
|
"loss": 0.2008, |
|
"num_tokens": 1358654903.0, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.425710293432697, |
|
"grad_norm": 0.19777137481318433, |
|
"learning_rate": 1.0597169485674835e-05, |
|
"loss": 0.1848, |
|
"num_tokens": 1361276343.0, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.4303679552864463, |
|
"grad_norm": 0.20775299467322017, |
|
"learning_rate": 1.0510873317224715e-05, |
|
"loss": 0.1865, |
|
"num_tokens": 1363897783.0, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.4350256171401954, |
|
"grad_norm": 0.19894049766984748, |
|
"learning_rate": 1.0424577148774595e-05, |
|
"loss": 0.1888, |
|
"num_tokens": 1366519223.0, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.439683278993945, |
|
"grad_norm": 0.23413570582928064, |
|
"learning_rate": 1.0338280980324475e-05, |
|
"loss": 0.1982, |
|
"num_tokens": 1369124851.0, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.4443409408476944, |
|
"grad_norm": 0.21657572328477895, |
|
"learning_rate": 1.0251984811874353e-05, |
|
"loss": 0.1819, |
|
"num_tokens": 1371746291.0, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.448998602701444, |
|
"grad_norm": 0.20427064714545917, |
|
"learning_rate": 1.0165688643424233e-05, |
|
"loss": 0.1957, |
|
"num_tokens": 1374367731.0, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.4536562645551934, |
|
"grad_norm": 0.2127507944462461, |
|
"learning_rate": 1.0079392474974112e-05, |
|
"loss": 0.1885, |
|
"num_tokens": 1376989171.0, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.458313926408943, |
|
"grad_norm": 0.22496735588127278, |
|
"learning_rate": 9.993096306523992e-06, |
|
"loss": 0.1915, |
|
"num_tokens": 1379610611.0, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.4629715882626924, |
|
"grad_norm": 0.250954777611335, |
|
"learning_rate": 9.90680013807387e-06, |
|
"loss": 0.1965, |
|
"num_tokens": 1382232051.0, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.4676292501164414, |
|
"grad_norm": 0.239020428214204, |
|
"learning_rate": 9.82050396962375e-06, |
|
"loss": 0.1909, |
|
"num_tokens": 1384853491.0, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.472286911970191, |
|
"grad_norm": 0.21219394873292483, |
|
"learning_rate": 9.734207801173628e-06, |
|
"loss": 0.1853, |
|
"num_tokens": 1387439705.0, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.4769445738239404, |
|
"grad_norm": 0.2339132165376453, |
|
"learning_rate": 9.647911632723508e-06, |
|
"loss": 0.1859, |
|
"num_tokens": 1390055579.0, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.48160223567769, |
|
"grad_norm": 0.20931019992718092, |
|
"learning_rate": 9.561615464273386e-06, |
|
"loss": 0.1946, |
|
"num_tokens": 1392677019.0, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.4862598975314394, |
|
"grad_norm": 0.21664209547112198, |
|
"learning_rate": 9.475319295823266e-06, |
|
"loss": 0.1879, |
|
"num_tokens": 1395298459.0, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.4909175593851884, |
|
"grad_norm": 0.21377592639772283, |
|
"learning_rate": 9.389023127373144e-06, |
|
"loss": 0.1873, |
|
"num_tokens": 1397919899.0, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.495575221238938, |
|
"grad_norm": 0.2100788699209234, |
|
"learning_rate": 9.302726958923024e-06, |
|
"loss": 0.1894, |
|
"num_tokens": 1400541339.0, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.5002328830926874, |
|
"grad_norm": 0.24898127480817744, |
|
"learning_rate": 9.216430790472904e-06, |
|
"loss": 0.1921, |
|
"num_tokens": 1403162779.0, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.504890544946437, |
|
"grad_norm": 0.2679217103887551, |
|
"learning_rate": 9.130134622022784e-06, |
|
"loss": 0.1867, |
|
"num_tokens": 1405784219.0, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.5095482068001864, |
|
"grad_norm": 0.264365285211875, |
|
"learning_rate": 9.043838453572662e-06, |
|
"loss": 0.1878, |
|
"num_tokens": 1408405659.0, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.514205868653936, |
|
"grad_norm": 0.2177074226742054, |
|
"learning_rate": 8.957542285122542e-06, |
|
"loss": 0.1868, |
|
"num_tokens": 1411027099.0, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.5188635305076854, |
|
"grad_norm": 0.22624315481556287, |
|
"learning_rate": 8.87124611667242e-06, |
|
"loss": 0.1876, |
|
"num_tokens": 1413648539.0, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.5235211923614345, |
|
"grad_norm": 0.1978006596252754, |
|
"learning_rate": 8.7849499482223e-06, |
|
"loss": 0.1905, |
|
"num_tokens": 1416269979.0, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.528178854215184, |
|
"grad_norm": 0.274994491294196, |
|
"learning_rate": 8.698653779772179e-06, |
|
"loss": 0.1884, |
|
"num_tokens": 1418891419.0, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.5328365160689335, |
|
"grad_norm": 0.19469792288627025, |
|
"learning_rate": 8.612357611322058e-06, |
|
"loss": 0.1949, |
|
"num_tokens": 1421512859.0, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.537494177922683, |
|
"grad_norm": 0.20344558223616066, |
|
"learning_rate": 8.526061442871937e-06, |
|
"loss": 0.1904, |
|
"num_tokens": 1424134299.0, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.542151839776432, |
|
"grad_norm": 0.20543803618958464, |
|
"learning_rate": 8.439765274421817e-06, |
|
"loss": 0.1947, |
|
"num_tokens": 1426729315.0, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.5468095016301815, |
|
"grad_norm": 0.20586834460357978, |
|
"learning_rate": 8.353469105971695e-06, |
|
"loss": 0.1799, |
|
"num_tokens": 1429350755.0, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.551467163483931, |
|
"grad_norm": 0.20869595757236642, |
|
"learning_rate": 8.267172937521575e-06, |
|
"loss": 0.1849, |
|
"num_tokens": 1431972195.0, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.5561248253376805, |
|
"grad_norm": 0.20469546705647887, |
|
"learning_rate": 8.180876769071453e-06, |
|
"loss": 0.193, |
|
"num_tokens": 1434593635.0, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.56078248719143, |
|
"grad_norm": 0.19749997870794891, |
|
"learning_rate": 8.094580600621333e-06, |
|
"loss": 0.1906, |
|
"num_tokens": 1437215075.0, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.5654401490451795, |
|
"grad_norm": 0.21537997550568386, |
|
"learning_rate": 8.008284432171211e-06, |
|
"loss": 0.1905, |
|
"num_tokens": 1439836515.0, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.570097810898929, |
|
"grad_norm": 0.227592429204148, |
|
"learning_rate": 7.921988263721091e-06, |
|
"loss": 0.1821, |
|
"num_tokens": 1442457955.0, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.574755472752678, |
|
"grad_norm": 0.234230256816076, |
|
"learning_rate": 7.835692095270971e-06, |
|
"loss": 0.1865, |
|
"num_tokens": 1445062211.0, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.5794131346064275, |
|
"grad_norm": 0.25861680354818406, |
|
"learning_rate": 7.749395926820851e-06, |
|
"loss": 0.1848, |
|
"num_tokens": 1447683651.0, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.584070796460177, |
|
"grad_norm": 0.23014158285970068, |
|
"learning_rate": 7.663099758370729e-06, |
|
"loss": 0.1826, |
|
"num_tokens": 1450303010.0, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.5887284583139265, |
|
"grad_norm": 0.20030501496288292, |
|
"learning_rate": 7.576803589920608e-06, |
|
"loss": 0.1937, |
|
"num_tokens": 1452924450.0, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.5933861201676756, |
|
"grad_norm": 0.21189324719032693, |
|
"learning_rate": 7.490507421470487e-06, |
|
"loss": 0.1879, |
|
"num_tokens": 1455524225.0, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.598043782021425, |
|
"grad_norm": 0.19097546345147012, |
|
"learning_rate": 7.4042112530203655e-06, |
|
"loss": 0.1966, |
|
"num_tokens": 1458115944.0, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.6027014438751745, |
|
"grad_norm": 0.29883914842618964, |
|
"learning_rate": 7.317915084570245e-06, |
|
"loss": 0.1917, |
|
"num_tokens": 1460737384.0, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.607359105728924, |
|
"grad_norm": 0.19676602110755637, |
|
"learning_rate": 7.231618916120124e-06, |
|
"loss": 0.1933, |
|
"num_tokens": 1463358824.0, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.6120167675826735, |
|
"grad_norm": 0.1990332465486848, |
|
"learning_rate": 7.1453227476700035e-06, |
|
"loss": 0.1949, |
|
"num_tokens": 1465980264.0, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.616674429436423, |
|
"grad_norm": 0.23709753658921823, |
|
"learning_rate": 7.059026579219883e-06, |
|
"loss": 0.189, |
|
"num_tokens": 1468590912.0, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.6213320912901725, |
|
"grad_norm": 0.18992036833544143, |
|
"learning_rate": 6.9727304107697625e-06, |
|
"loss": 0.1774, |
|
"num_tokens": 1471212352.0, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.625989753143922, |
|
"grad_norm": 0.2068681005516739, |
|
"learning_rate": 6.886434242319641e-06, |
|
"loss": 0.1919, |
|
"num_tokens": 1473833792.0, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.630647414997671, |
|
"grad_norm": 0.20606667722426614, |
|
"learning_rate": 6.800138073869521e-06, |
|
"loss": 0.194, |
|
"num_tokens": 1476455232.0, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.6353050768514206, |
|
"grad_norm": 0.23982760911056664, |
|
"learning_rate": 6.713841905419399e-06, |
|
"loss": 0.1854, |
|
"num_tokens": 1479076672.0, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.63996273870517, |
|
"grad_norm": 0.21073555793675697, |
|
"learning_rate": 6.627545736969279e-06, |
|
"loss": 0.1949, |
|
"num_tokens": 1481698112.0, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.6446204005589196, |
|
"grad_norm": 0.19453233190357822, |
|
"learning_rate": 6.541249568519157e-06, |
|
"loss": 0.1981, |
|
"num_tokens": 1484295121.0, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.6492780624126686, |
|
"grad_norm": 0.28615224533623485, |
|
"learning_rate": 6.454953400069037e-06, |
|
"loss": 0.1843, |
|
"num_tokens": 1486916561.0, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.653935724266418, |
|
"grad_norm": 0.19196899236511794, |
|
"learning_rate": 6.368657231618916e-06, |
|
"loss": 0.1939, |
|
"num_tokens": 1489515514.0, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.6585933861201676, |
|
"grad_norm": 0.1974018590040202, |
|
"learning_rate": 6.282361063168796e-06, |
|
"loss": 0.1785, |
|
"num_tokens": 1492136954.0, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.663251047973917, |
|
"grad_norm": 0.19591164104142184, |
|
"learning_rate": 6.196064894718675e-06, |
|
"loss": 0.1913, |
|
"num_tokens": 1494717079.0, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.6679087098276666, |
|
"grad_norm": 0.19833670363700637, |
|
"learning_rate": 6.109768726268554e-06, |
|
"loss": 0.1892, |
|
"num_tokens": 1497338519.0, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.672566371681416, |
|
"grad_norm": 0.20511862285572172, |
|
"learning_rate": 6.023472557818433e-06, |
|
"loss": 0.1875, |
|
"num_tokens": 1499959959.0, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.6772240335351656, |
|
"grad_norm": 0.2007161163079071, |
|
"learning_rate": 5.937176389368312e-06, |
|
"loss": 0.192, |
|
"num_tokens": 1502573694.0, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.6818816953889146, |
|
"grad_norm": 0.21203335721803362, |
|
"learning_rate": 5.850880220918191e-06, |
|
"loss": 0.1995, |
|
"num_tokens": 1505152753.0, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.686539357242664, |
|
"grad_norm": 0.1972444939219368, |
|
"learning_rate": 5.76458405246807e-06, |
|
"loss": 0.197, |
|
"num_tokens": 1507745161.0, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.6911970190964136, |
|
"grad_norm": 0.18813496250520612, |
|
"learning_rate": 5.67828788401795e-06, |
|
"loss": 0.1897, |
|
"num_tokens": 1510366601.0, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.695854680950163, |
|
"grad_norm": 0.21683948817382884, |
|
"learning_rate": 5.591991715567829e-06, |
|
"loss": 0.1796, |
|
"num_tokens": 1512988041.0, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.700512342803912, |
|
"grad_norm": 0.20558268991109876, |
|
"learning_rate": 5.5056955471177085e-06, |
|
"loss": 0.1869, |
|
"num_tokens": 1515609481.0, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.7051700046576617, |
|
"grad_norm": 0.21816979196996153, |
|
"learning_rate": 5.4193993786675876e-06, |
|
"loss": 0.1803, |
|
"num_tokens": 1518230921.0, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.709827666511411, |
|
"grad_norm": 0.1974560996544184, |
|
"learning_rate": 5.333103210217467e-06, |
|
"loss": 0.1875, |
|
"num_tokens": 1520838211.0, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.7144853283651607, |
|
"grad_norm": 0.2142391911040735, |
|
"learning_rate": 5.246807041767346e-06, |
|
"loss": 0.1913, |
|
"num_tokens": 1523450790.0, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.71914299021891, |
|
"grad_norm": 0.19519798254614276, |
|
"learning_rate": 5.160510873317225e-06, |
|
"loss": 0.1898, |
|
"num_tokens": 1526072230.0, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.7238006520726596, |
|
"grad_norm": 0.19984436672708594, |
|
"learning_rate": 5.074214704867105e-06, |
|
"loss": 0.1927, |
|
"num_tokens": 1528678779.0, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.728458313926409, |
|
"grad_norm": 0.19080800820329621, |
|
"learning_rate": 4.987918536416984e-06, |
|
"loss": 0.1876, |
|
"num_tokens": 1531300219.0, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.7331159757801586, |
|
"grad_norm": 0.19259051393816032, |
|
"learning_rate": 4.901622367966863e-06, |
|
"loss": 0.1883, |
|
"num_tokens": 1533921659.0, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.7377736376339077, |
|
"grad_norm": 0.20045802931215237, |
|
"learning_rate": 4.815326199516742e-06, |
|
"loss": 0.1921, |
|
"num_tokens": 1536543099.0, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.742431299487657, |
|
"grad_norm": 0.20839258200951158, |
|
"learning_rate": 4.729030031066621e-06, |
|
"loss": 0.1995, |
|
"num_tokens": 1539159628.0, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.7470889613414067, |
|
"grad_norm": 0.19936373974638227, |
|
"learning_rate": 4.6427338626165e-06, |
|
"loss": 0.1946, |
|
"num_tokens": 1541781068.0, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.751746623195156, |
|
"grad_norm": 0.22149358888220214, |
|
"learning_rate": 4.556437694166379e-06, |
|
"loss": 0.2, |
|
"num_tokens": 1544402508.0, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.7564042850489052, |
|
"grad_norm": 0.1998628890141551, |
|
"learning_rate": 4.470141525716258e-06, |
|
"loss": 0.1955, |
|
"num_tokens": 1547023948.0, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.7610619469026547, |
|
"grad_norm": 0.19675965152173985, |
|
"learning_rate": 4.383845357266138e-06, |
|
"loss": 0.1953, |
|
"num_tokens": 1549645388.0, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.765719608756404, |
|
"grad_norm": 0.19054438082125744, |
|
"learning_rate": 4.297549188816017e-06, |
|
"loss": 0.1935, |
|
"num_tokens": 1552266828.0, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.7703772706101537, |
|
"grad_norm": 0.2100247956087177, |
|
"learning_rate": 4.211253020365896e-06, |
|
"loss": 0.1869, |
|
"num_tokens": 1554875512.0, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.775034932463903, |
|
"grad_norm": 0.22516581326706617, |
|
"learning_rate": 4.124956851915775e-06, |
|
"loss": 0.1829, |
|
"num_tokens": 1557496952.0, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.7796925943176527, |
|
"grad_norm": 0.22275259848789913, |
|
"learning_rate": 4.0386606834656544e-06, |
|
"loss": 0.1945, |
|
"num_tokens": 1560118392.0, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.784350256171402, |
|
"grad_norm": 0.2035646672810568, |
|
"learning_rate": 3.9523645150155335e-06, |
|
"loss": 0.1892, |
|
"num_tokens": 1562730863.0, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.7890079180251512, |
|
"grad_norm": 0.19893135307571172, |
|
"learning_rate": 3.8660683465654126e-06, |
|
"loss": 0.1917, |
|
"num_tokens": 1565302118.0, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.7936655798789007, |
|
"grad_norm": 0.1994424746191013, |
|
"learning_rate": 3.779772178115292e-06, |
|
"loss": 0.1907, |
|
"num_tokens": 1567923558.0, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.7983232417326502, |
|
"grad_norm": 0.1941087329302925, |
|
"learning_rate": 3.693476009665171e-06, |
|
"loss": 0.1897, |
|
"num_tokens": 1570544998.0, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.8029809035863997, |
|
"grad_norm": 0.19337805581200604, |
|
"learning_rate": 3.6071798412150506e-06, |
|
"loss": 0.185, |
|
"num_tokens": 1573166438.0, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.807638565440149, |
|
"grad_norm": 0.19861773096632682, |
|
"learning_rate": 3.5208836727649297e-06, |
|
"loss": 0.1807, |
|
"num_tokens": 1575787878.0, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.8122962272938983, |
|
"grad_norm": 0.20119520009288108, |
|
"learning_rate": 3.434587504314809e-06, |
|
"loss": 0.1941, |
|
"num_tokens": 1578409318.0, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.8169538891476478, |
|
"grad_norm": 0.1901334131646749, |
|
"learning_rate": 3.348291335864688e-06, |
|
"loss": 0.1881, |
|
"num_tokens": 1581003943.0, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.8216115510013973, |
|
"grad_norm": 0.2037752073186323, |
|
"learning_rate": 3.2619951674145674e-06, |
|
"loss": 0.1846, |
|
"num_tokens": 1583601169.0, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.8262692128551468, |
|
"grad_norm": 0.19225519448717485, |
|
"learning_rate": 3.1756989989644464e-06, |
|
"loss": 0.1858, |
|
"num_tokens": 1586222609.0, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.8309268747088963, |
|
"grad_norm": 0.19973043168505442, |
|
"learning_rate": 3.089402830514325e-06, |
|
"loss": 0.1946, |
|
"num_tokens": 1588841357.0, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.8355845365626458, |
|
"grad_norm": 0.18493025307293057, |
|
"learning_rate": 3.0031066620642046e-06, |
|
"loss": 0.1861, |
|
"num_tokens": 1591462797.0, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.840242198416395, |
|
"grad_norm": 0.2135184210397531, |
|
"learning_rate": 2.9168104936140837e-06, |
|
"loss": 0.1858, |
|
"num_tokens": 1594081215.0, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.8448998602701443, |
|
"grad_norm": 0.2040789058976081, |
|
"learning_rate": 2.8305143251639627e-06, |
|
"loss": 0.1899, |
|
"num_tokens": 1596671205.0, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.849557522123894, |
|
"grad_norm": 0.20810721325069814, |
|
"learning_rate": 2.7442181567138422e-06, |
|
"loss": 0.1998, |
|
"num_tokens": 1599292645.0, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.8542151839776433, |
|
"grad_norm": 0.19535763016764432, |
|
"learning_rate": 2.6579219882637213e-06, |
|
"loss": 0.1958, |
|
"num_tokens": 1601907286.0, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.858872845831393, |
|
"grad_norm": 0.19596015876020706, |
|
"learning_rate": 2.5716258198136004e-06, |
|
"loss": 0.1816, |
|
"num_tokens": 1604525687.0, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.863530507685142, |
|
"grad_norm": 0.20927607583564214, |
|
"learning_rate": 2.4853296513634795e-06, |
|
"loss": 0.1881, |
|
"num_tokens": 1607147127.0, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.8681881695388913, |
|
"grad_norm": 0.19480398108677532, |
|
"learning_rate": 2.399033482913359e-06, |
|
"loss": 0.1847, |
|
"num_tokens": 1609768567.0, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.872845831392641, |
|
"grad_norm": 0.18059331005524973, |
|
"learning_rate": 2.312737314463238e-06, |
|
"loss": 0.186, |
|
"num_tokens": 1612390007.0, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.8775034932463903, |
|
"grad_norm": 0.28380582647260677, |
|
"learning_rate": 2.226441146013117e-06, |
|
"loss": 0.1965, |
|
"num_tokens": 1615000534.0, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.88216115510014, |
|
"grad_norm": 0.18519192829599476, |
|
"learning_rate": 2.140144977562996e-06, |
|
"loss": 0.1731, |
|
"num_tokens": 1617621974.0, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.8868188169538893, |
|
"grad_norm": 0.18943911462800966, |
|
"learning_rate": 2.0538488091128757e-06, |
|
"loss": 0.1932, |
|
"num_tokens": 1620243414.0, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.891476478807639, |
|
"grad_norm": 0.20185510901537124, |
|
"learning_rate": 1.9675526406627547e-06, |
|
"loss": 0.1858, |
|
"num_tokens": 1622864854.0, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.896134140661388, |
|
"grad_norm": 0.1849272809577793, |
|
"learning_rate": 1.8812564722126338e-06, |
|
"loss": 0.1849, |
|
"num_tokens": 1625486294.0, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.9007918025151374, |
|
"grad_norm": 0.18974926125584826, |
|
"learning_rate": 1.794960303762513e-06, |
|
"loss": 0.1846, |
|
"num_tokens": 1628107734.0, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.905449464368887, |
|
"grad_norm": 0.19805184324376912, |
|
"learning_rate": 1.7086641353123924e-06, |
|
"loss": 0.1891, |
|
"num_tokens": 1630709321.0, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.9101071262226363, |
|
"grad_norm": 0.1923784438938438, |
|
"learning_rate": 1.6223679668622715e-06, |
|
"loss": 0.1917, |
|
"num_tokens": 1633298077.0, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.9147647880763854, |
|
"grad_norm": 0.18414721849685736, |
|
"learning_rate": 1.5360717984121505e-06, |
|
"loss": 0.1816, |
|
"num_tokens": 1635910626.0, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.919422449930135, |
|
"grad_norm": 0.19255074604870911, |
|
"learning_rate": 1.4497756299620296e-06, |
|
"loss": 0.1962, |
|
"num_tokens": 1638532066.0, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.9240801117838844, |
|
"grad_norm": 0.189954260071868, |
|
"learning_rate": 1.363479461511909e-06, |
|
"loss": 0.193, |
|
"num_tokens": 1641153506.0, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.928737773637634, |
|
"grad_norm": 0.20538589440314248, |
|
"learning_rate": 1.277183293061788e-06, |
|
"loss": 0.1965, |
|
"num_tokens": 1643773175.0, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.9333954354913834, |
|
"grad_norm": 0.19336904650694964, |
|
"learning_rate": 1.1908871246116673e-06, |
|
"loss": 0.186, |
|
"num_tokens": 1646380875.0, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.938053097345133, |
|
"grad_norm": 0.26726325995033184, |
|
"learning_rate": 1.1045909561615463e-06, |
|
"loss": 0.1883, |
|
"num_tokens": 1648974711.0, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.9427107591988824, |
|
"grad_norm": 0.19928547284660958, |
|
"learning_rate": 1.0182947877114256e-06, |
|
"loss": 0.1908, |
|
"num_tokens": 1651596151.0, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.9473684210526314, |
|
"grad_norm": 0.1999931278950155, |
|
"learning_rate": 9.319986192613048e-07, |
|
"loss": 0.1963, |
|
"num_tokens": 1654217591.0, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.952026082906381, |
|
"grad_norm": 0.18857341165059974, |
|
"learning_rate": 8.45702450811184e-07, |
|
"loss": 0.1836, |
|
"num_tokens": 1656839031.0, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.9566837447601304, |
|
"grad_norm": 0.21091493599043323, |
|
"learning_rate": 7.594062823610632e-07, |
|
"loss": 0.1924, |
|
"num_tokens": 1659460471.0, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.96134140661388, |
|
"grad_norm": 0.18915430253259063, |
|
"learning_rate": 6.731101139109423e-07, |
|
"loss": 0.1798, |
|
"num_tokens": 1662081911.0, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.9659990684676294, |
|
"grad_norm": 0.19423661318275964, |
|
"learning_rate": 5.868139454608215e-07, |
|
"loss": 0.1915, |
|
"num_tokens": 1664702444.0, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.9706567303213784, |
|
"grad_norm": 0.1987475010895318, |
|
"learning_rate": 5.005177770107007e-07, |
|
"loss": 0.1819, |
|
"num_tokens": 1667317968.0, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.975314392175128, |
|
"grad_norm": 0.21052305975759994, |
|
"learning_rate": 4.142216085605799e-07, |
|
"loss": 0.1842, |
|
"num_tokens": 1669939408.0, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.9799720540288774, |
|
"grad_norm": 0.18964427486372967, |
|
"learning_rate": 3.279254401104591e-07, |
|
"loss": 0.1879, |
|
"num_tokens": 1672514586.0, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.984629715882627, |
|
"grad_norm": 0.18975450375858877, |
|
"learning_rate": 2.416292716603383e-07, |
|
"loss": 0.1874, |
|
"num_tokens": 1675136026.0, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.9892873777363764, |
|
"grad_norm": 0.1909619964360884, |
|
"learning_rate": 1.5533310321021747e-07, |
|
"loss": 0.1757, |
|
"num_tokens": 1677728036.0, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.993945039590126, |
|
"grad_norm": 0.2036554562601111, |
|
"learning_rate": 6.903693476009665e-08, |
|
"loss": 0.1852, |
|
"num_tokens": 1680349476.0, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 2.9976711690731253, |
|
"num_tokens": 1682446628.0, |
|
"step": 3219, |
|
"total_flos": 2.754977641940386e+18, |
|
"train_loss": 0.27190176190408577, |
|
"train_runtime": 89221.753, |
|
"train_samples_per_second": 0.577, |
|
"train_steps_per_second": 0.036 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 3219, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.754977641940386e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|