{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.850632911392405,
  "eval_steps": 500,
  "global_step": 224,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030379746835443037,
      "grad_norm": 6.4974170666129405,
      "learning_rate": 8.695652173913044e-07,
      "loss": 0.8286,
      "step": 1
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 6.289827405515917,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 0.8151,
      "step": 2
    },
    {
      "epoch": 0.09113924050632911,
      "grad_norm": 6.4036402262813406,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 0.8193,
      "step": 3
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 5.94576312643178,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 0.8001,
      "step": 4
    },
    {
      "epoch": 0.1518987341772152,
      "grad_norm": 4.332743590295384,
      "learning_rate": 4.347826086956522e-06,
      "loss": 0.7664,
      "step": 5
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 2.3167722633137267,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 0.7308,
      "step": 6
    },
    {
      "epoch": 0.21265822784810126,
      "grad_norm": 2.0549879974300556,
      "learning_rate": 6.086956521739132e-06,
      "loss": 0.7232,
      "step": 7
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 3.9216268287534466,
      "learning_rate": 6.956521739130435e-06,
      "loss": 0.7073,
      "step": 8
    },
    {
      "epoch": 0.27341772151898736,
      "grad_norm": 4.400648238604796,
      "learning_rate": 7.82608695652174e-06,
      "loss": 0.7347,
      "step": 9
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 4.27563822678231,
      "learning_rate": 8.695652173913044e-06,
      "loss": 0.7053,
      "step": 10
    },
    {
      "epoch": 0.3341772151898734,
      "grad_norm": 4.025198490705621,
      "learning_rate": 9.565217391304349e-06,
      "loss": 0.6675,
      "step": 11
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 3.420456932222551,
      "learning_rate": 1.0434782608695653e-05,
      "loss": 0.6849,
      "step": 12
    },
    {
      "epoch": 0.3949367088607595,
      "grad_norm": 2.1030806427648177,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 0.6451,
      "step": 13
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 1.8697807789878425,
      "learning_rate": 1.2173913043478263e-05,
      "loss": 0.6094,
      "step": 14
    },
    {
      "epoch": 0.45569620253164556,
      "grad_norm": 2.26056148981361,
      "learning_rate": 1.3043478260869566e-05,
      "loss": 0.6282,
      "step": 15
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 1.5886195054713648,
      "learning_rate": 1.391304347826087e-05,
      "loss": 0.6003,
      "step": 16
    },
    {
      "epoch": 0.5164556962025316,
      "grad_norm": 1.1004639599591655,
      "learning_rate": 1.4782608695652174e-05,
      "loss": 0.5957,
      "step": 17
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 1.097476031820012,
      "learning_rate": 1.565217391304348e-05,
      "loss": 0.5666,
      "step": 18
    },
    {
      "epoch": 0.5772151898734177,
      "grad_norm": 1.0052979984531278,
      "learning_rate": 1.6521739130434785e-05,
      "loss": 0.5719,
      "step": 19
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.9894560127963354,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.5652,
      "step": 20
    },
    {
      "epoch": 0.6379746835443038,
      "grad_norm": 0.9132016901726216,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 0.5456,
      "step": 21
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.8028655023056698,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 0.5328,
      "step": 22
    },
    {
      "epoch": 0.6987341772151898,
      "grad_norm": 0.9425641229106339,
      "learning_rate": 2e-05,
      "loss": 0.5481,
      "step": 23
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.7611348569787119,
      "learning_rate": 1.999877856940653e-05,
      "loss": 0.5592,
      "step": 24
    },
    {
      "epoch": 0.759493670886076,
      "grad_norm": 0.7564654959636459,
      "learning_rate": 1.999511457600466e-05,
      "loss": 0.5466,
      "step": 25
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.7930008987865393,
      "learning_rate": 1.9989008914857115e-05,
      "loss": 0.535,
      "step": 26
    },
    {
      "epoch": 0.8202531645569621,
      "grad_norm": 0.7493626663234454,
      "learning_rate": 1.998046307749216e-05,
      "loss": 0.528,
      "step": 27
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.6101946445971717,
      "learning_rate": 1.9969479151539238e-05,
      "loss": 0.5212,
      "step": 28
    },
    {
      "epoch": 0.8810126582278481,
      "grad_norm": 0.5769806586436026,
      "learning_rate": 1.9956059820218982e-05,
      "loss": 0.5391,
      "step": 29
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.7157670331581075,
      "learning_rate": 1.9940208361687762e-05,
      "loss": 0.5242,
      "step": 30
    },
    {
      "epoch": 0.9417721518987342,
      "grad_norm": 0.4762911173255839,
      "learning_rate": 1.9921928648236855e-05,
      "loss": 0.5079,
      "step": 31
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.5944527252102053,
      "learning_rate": 1.990122514534651e-05,
      "loss": 0.4989,
      "step": 32
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 0.7298002749171054,
      "learning_rate": 1.9878102910595097e-05,
      "loss": 0.5064,
      "step": 33
    },
    {
      "epoch": 1.040506329113924,
      "grad_norm": 0.5414950063745023,
      "learning_rate": 1.985256759242359e-05,
      "loss": 0.4756,
      "step": 34
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 0.6441360723708793,
      "learning_rate": 1.982462542875576e-05,
      "loss": 0.4914,
      "step": 35
    },
    {
      "epoch": 1.1012658227848102,
      "grad_norm": 0.4946596745212841,
      "learning_rate": 1.979428324547432e-05,
      "loss": 0.4845,
      "step": 36
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.5093476336294606,
      "learning_rate": 1.9761548454753455e-05,
      "loss": 0.4774,
      "step": 37
    },
    {
      "epoch": 1.1620253164556962,
      "grad_norm": 0.5294711825014295,
      "learning_rate": 1.972642905324813e-05,
      "loss": 0.465,
      "step": 38
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.42433857748865206,
      "learning_rate": 1.9688933620140638e-05,
      "loss": 0.4655,
      "step": 39
    },
    {
      "epoch": 1.2227848101265824,
      "grad_norm": 0.5103486723929203,
      "learning_rate": 1.96490713150448e-05,
      "loss": 0.4768,
      "step": 40
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.49791097543662277,
      "learning_rate": 1.9606851875768404e-05,
      "loss": 0.4766,
      "step": 41
    },
    {
      "epoch": 1.2835443037974683,
      "grad_norm": 0.5283318712004925,
      "learning_rate": 1.956228561593441e-05,
      "loss": 0.4574,
      "step": 42
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.47403972426656127,
      "learning_rate": 1.9515383422461457e-05,
      "loss": 0.4776,
      "step": 43
    },
    {
      "epoch": 1.3443037974683545,
      "grad_norm": 0.5069935307311383,
      "learning_rate": 1.9466156752904344e-05,
      "loss": 0.4764,
      "step": 44
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.47875932287647166,
      "learning_rate": 1.9414617632655114e-05,
      "loss": 0.4548,
      "step": 45
    },
    {
      "epoch": 1.4050632911392404,
      "grad_norm": 0.48389828010345176,
      "learning_rate": 1.9360778652005416e-05,
      "loss": 0.472,
      "step": 46
    },
    {
      "epoch": 1.4354430379746836,
      "grad_norm": 0.47182918877963814,
      "learning_rate": 1.9304652963070868e-05,
      "loss": 0.4609,
      "step": 47
    },
    {
      "epoch": 1.4658227848101266,
      "grad_norm": 0.5277737630304099,
      "learning_rate": 1.9246254276578175e-05,
      "loss": 0.4616,
      "step": 48
    },
    {
      "epoch": 1.4962025316455696,
      "grad_norm": 0.49037387751432276,
      "learning_rate": 1.9185596858515797e-05,
      "loss": 0.465,
      "step": 49
    },
    {
      "epoch": 1.5265822784810128,
      "grad_norm": 0.4683776025075759,
      "learning_rate": 1.9122695526648968e-05,
      "loss": 0.4607,
      "step": 50
    },
    {
      "epoch": 1.5569620253164556,
      "grad_norm": 0.5657198678079827,
      "learning_rate": 1.905756564689991e-05,
      "loss": 0.475,
      "step": 51
    },
    {
      "epoch": 1.5873417721518988,
      "grad_norm": 0.36540383133355375,
      "learning_rate": 1.8990223129594146e-05,
      "loss": 0.4554,
      "step": 52
    },
    {
      "epoch": 1.6177215189873417,
      "grad_norm": 0.44871637377849793,
      "learning_rate": 1.8920684425573865e-05,
      "loss": 0.4427,
      "step": 53
    },
    {
      "epoch": 1.6481012658227847,
      "grad_norm": 0.41443169088823717,
      "learning_rate": 1.884896652217917e-05,
      "loss": 0.4709,
      "step": 54
    },
    {
      "epoch": 1.678481012658228,
      "grad_norm": 0.44779153436928243,
      "learning_rate": 1.877508693909831e-05,
      "loss": 0.4749,
      "step": 55
    },
    {
      "epoch": 1.7088607594936709,
      "grad_norm": 0.4247551126128865,
      "learning_rate": 1.8699063724087905e-05,
      "loss": 0.4765,
      "step": 56
    },
    {
      "epoch": 1.7392405063291139,
      "grad_norm": 0.6036395212394368,
      "learning_rate": 1.862091544856407e-05,
      "loss": 0.4363,
      "step": 57
    },
    {
      "epoch": 1.769620253164557,
      "grad_norm": 0.4260481151358374,
      "learning_rate": 1.854066120306571e-05,
      "loss": 0.4441,
      "step": 58
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.7549989851715053,
      "learning_rate": 1.8458320592590976e-05,
      "loss": 0.4621,
      "step": 59
    },
    {
      "epoch": 1.830379746835443,
      "grad_norm": 0.5058105533094102,
      "learning_rate": 1.837391373180801e-05,
      "loss": 0.4423,
      "step": 60
    },
    {
      "epoch": 1.8607594936708862,
      "grad_norm": 0.5370621851699969,
      "learning_rate": 1.8287461240141217e-05,
      "loss": 0.4466,
      "step": 61
    },
    {
      "epoch": 1.891139240506329,
      "grad_norm": 0.5610764225893673,
      "learning_rate": 1.8198984236734246e-05,
      "loss": 0.4378,
      "step": 62
    },
    {
      "epoch": 1.9215189873417722,
      "grad_norm": 0.5922052596113978,
      "learning_rate": 1.8108504335290852e-05,
      "loss": 0.4559,
      "step": 63
    },
    {
      "epoch": 1.9518987341772152,
      "grad_norm": 0.4739092523369615,
      "learning_rate": 1.8016043638794975e-05,
      "loss": 0.4448,
      "step": 64
    },
    {
      "epoch": 1.9822784810126581,
      "grad_norm": 0.6028554849088902,
      "learning_rate": 1.7921624734111292e-05,
      "loss": 0.4338,
      "step": 65
    },
    {
      "epoch": 2.020253164556962,
      "grad_norm": 0.48528388263771194,
      "learning_rate": 1.7825270686467567e-05,
      "loss": 0.4168,
      "step": 66
    },
    {
      "epoch": 2.050632911392405,
      "grad_norm": 0.4435067765000118,
      "learning_rate": 1.7727005033820117e-05,
      "loss": 0.4079,
      "step": 67
    },
    {
      "epoch": 2.081012658227848,
      "grad_norm": 0.49895054830192304,
      "learning_rate": 1.762685178110382e-05,
      "loss": 0.3987,
      "step": 68
    },
    {
      "epoch": 2.1113924050632913,
      "grad_norm": 0.4840645731023052,
      "learning_rate": 1.752483539436807e-05,
      "loss": 0.4141,
      "step": 69
    },
    {
      "epoch": 2.141772151898734,
      "grad_norm": 0.49161000311716674,
      "learning_rate": 1.7420980794800013e-05,
      "loss": 0.3821,
      "step": 70
    },
    {
      "epoch": 2.1721518987341772,
      "grad_norm": 0.40635171868742836,
      "learning_rate": 1.731531335263669e-05,
      "loss": 0.41,
      "step": 71
    },
    {
      "epoch": 2.2025316455696204,
      "grad_norm": 0.4767019681490631,
      "learning_rate": 1.720785888096743e-05,
      "loss": 0.4169,
      "step": 72
    },
    {
      "epoch": 2.232911392405063,
      "grad_norm": 0.5356414359619475,
      "learning_rate": 1.7098643629428035e-05,
      "loss": 0.3974,
      "step": 73
    },
    {
      "epoch": 2.2632911392405064,
      "grad_norm": 0.44342208441500364,
      "learning_rate": 1.698769427778842e-05,
      "loss": 0.3927,
      "step": 74
    },
    {
      "epoch": 2.293670886075949,
      "grad_norm": 0.4470607689391908,
      "learning_rate": 1.687503792943506e-05,
      "loss": 0.3973,
      "step": 75
    },
    {
      "epoch": 2.3240506329113924,
      "grad_norm": 0.5652174939048651,
      "learning_rate": 1.6760702104750046e-05,
      "loss": 0.4007,
      "step": 76
    },
    {
      "epoch": 2.3544303797468356,
      "grad_norm": 0.480791916654712,
      "learning_rate": 1.664471473438822e-05,
      "loss": 0.3912,
      "step": 77
    },
    {
      "epoch": 2.3848101265822783,
      "grad_norm": 0.49626658108912697,
      "learning_rate": 1.6527104152454096e-05,
      "loss": 0.3843,
      "step": 78
    },
    {
      "epoch": 2.4151898734177215,
      "grad_norm": 0.5356897609748498,
      "learning_rate": 1.6407899089580263e-05,
      "loss": 0.3882,
      "step": 79
    },
    {
      "epoch": 2.4455696202531647,
      "grad_norm": 0.49760734807745505,
      "learning_rate": 1.628712866590885e-05,
      "loss": 0.3827,
      "step": 80
    },
    {
      "epoch": 2.4759493670886075,
      "grad_norm": 0.5231652008328098,
      "learning_rate": 1.6164822383977912e-05,
      "loss": 0.3951,
      "step": 81
    },
    {
      "epoch": 2.5063291139240507,
      "grad_norm": 0.5159729083805341,
      "learning_rate": 1.604101012151436e-05,
      "loss": 0.3873,
      "step": 82
    },
    {
      "epoch": 2.536708860759494,
      "grad_norm": 0.5272589632436286,
      "learning_rate": 1.5915722124135227e-05,
      "loss": 0.3871,
      "step": 83
    },
    {
      "epoch": 2.5670886075949366,
      "grad_norm": 0.4764387228380176,
      "learning_rate": 1.5788988997959115e-05,
      "loss": 0.3861,
      "step": 84
    },
    {
      "epoch": 2.59746835443038,
      "grad_norm": 0.4669252181715756,
      "learning_rate": 1.5660841702129533e-05,
      "loss": 0.3795,
      "step": 85
    },
    {
      "epoch": 2.6278481012658226,
      "grad_norm": 0.4012441103446406,
      "learning_rate": 1.5531311541251995e-05,
      "loss": 0.3853,
      "step": 86
    },
    {
      "epoch": 2.6582278481012658,
      "grad_norm": 0.4435465833041234,
      "learning_rate": 1.540043015774676e-05,
      "loss": 0.3873,
      "step": 87
    },
    {
      "epoch": 2.688607594936709,
      "grad_norm": 0.3843356668548089,
      "learning_rate": 1.5268229524119007e-05,
      "loss": 0.3907,
      "step": 88
    },
    {
      "epoch": 2.7189873417721517,
      "grad_norm": 0.4441580848844239,
      "learning_rate": 1.513474193514842e-05,
      "loss": 0.41,
      "step": 89
    },
    {
      "epoch": 2.749367088607595,
      "grad_norm": 0.34006736698576706,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3832,
      "step": 90
    },
    {
      "epoch": 2.779746835443038,
      "grad_norm": 0.39379243235273864,
      "learning_rate": 1.4864036634258112e-05,
      "loss": 0.3848,
      "step": 91
    },
    {
      "epoch": 2.810126582278481,
      "grad_norm": 0.3523668648983496,
      "learning_rate": 1.4726885051885654e-05,
      "loss": 0.3863,
      "step": 92
    },
    {
      "epoch": 2.840506329113924,
      "grad_norm": 0.3983109819934856,
      "learning_rate": 1.4588578757110359e-05,
      "loss": 0.3863,
      "step": 93
    },
    {
      "epoch": 2.8708860759493673,
      "grad_norm": 0.380540221826223,
      "learning_rate": 1.4449151536240167e-05,
      "loss": 0.3779,
      "step": 94
    },
    {
      "epoch": 2.90126582278481,
      "grad_norm": 0.41734016320094736,
      "learning_rate": 1.4308637449409705e-05,
      "loss": 0.3867,
      "step": 95
    },
    {
      "epoch": 2.9316455696202532,
      "grad_norm": 0.38016674499407604,
      "learning_rate": 1.4167070822259868e-05,
      "loss": 0.3889,
      "step": 96
    },
    {
      "epoch": 2.962025316455696,
      "grad_norm": 0.4045997822103201,
      "learning_rate": 1.402448623755254e-05,
      "loss": 0.3877,
      "step": 97
    },
    {
      "epoch": 2.992405063291139,
      "grad_norm": 0.3698254473141723,
      "learning_rate": 1.3880918526722497e-05,
      "loss": 0.3825,
      "step": 98
    },
    {
      "epoch": 3.030379746835443,
      "grad_norm": 0.522327389194682,
      "learning_rate": 1.3736402761368597e-05,
      "loss": 0.3446,
      "step": 99
    },
    {
      "epoch": 3.060759493670886,
      "grad_norm": 0.48797768270381986,
      "learning_rate": 1.3590974244686248e-05,
      "loss": 0.3323,
      "step": 100
    },
    {
      "epoch": 3.091139240506329,
      "grad_norm": 0.5317349524299804,
      "learning_rate": 1.344466850284333e-05,
      "loss": 0.3434,
      "step": 101
    },
    {
      "epoch": 3.1215189873417724,
      "grad_norm": 0.5825524962365572,
      "learning_rate": 1.3297521276301666e-05,
      "loss": 0.3381,
      "step": 102
    },
    {
      "epoch": 3.151898734177215,
      "grad_norm": 0.4775943015873331,
      "learning_rate": 1.3149568511086104e-05,
      "loss": 0.3211,
      "step": 103
    },
    {
      "epoch": 3.1822784810126583,
      "grad_norm": 0.37088721176541106,
      "learning_rate": 1.300084635000341e-05,
      "loss": 0.3243,
      "step": 104
    },
    {
      "epoch": 3.212658227848101,
      "grad_norm": 0.524410882067887,
      "learning_rate": 1.2851391123813075e-05,
      "loss": 0.3433,
      "step": 105
    },
    {
      "epoch": 3.2430379746835443,
      "grad_norm": 0.42287255582884636,
      "learning_rate": 1.2701239342352223e-05,
      "loss": 0.3391,
      "step": 106
    },
    {
      "epoch": 3.2734177215189875,
      "grad_norm": 0.4554593428965173,
      "learning_rate": 1.2550427685616767e-05,
      "loss": 0.3403,
      "step": 107
    },
    {
      "epoch": 3.3037974683544302,
      "grad_norm": 0.39210904323253903,
      "learning_rate": 1.239899299480098e-05,
      "loss": 0.3509,
      "step": 108
    },
    {
      "epoch": 3.3341772151898734,
      "grad_norm": 0.43734491288996724,
      "learning_rate": 1.2246972263297718e-05,
      "loss": 0.3363,
      "step": 109
    },
    {
      "epoch": 3.3645569620253166,
      "grad_norm": 0.4047367761425651,
      "learning_rate": 1.2094402627661447e-05,
      "loss": 0.3402,
      "step": 110
    },
    {
      "epoch": 3.3949367088607594,
      "grad_norm": 0.3937209895604764,
      "learning_rate": 1.1941321358536278e-05,
      "loss": 0.3433,
      "step": 111
    },
    {
      "epoch": 3.4253164556962026,
      "grad_norm": 0.38313925509714003,
      "learning_rate": 1.1787765851551296e-05,
      "loss": 0.323,
      "step": 112
    },
    {
      "epoch": 3.4556962025316453,
      "grad_norm": 0.4113012036096517,
      "learning_rate": 1.1633773618185302e-05,
      "loss": 0.3313,
      "step": 113
    },
    {
      "epoch": 3.4860759493670885,
      "grad_norm": 0.35108239034348615,
      "learning_rate": 1.14793822766033e-05,
      "loss": 0.3366,
      "step": 114
    },
    {
      "epoch": 3.5164556962025317,
      "grad_norm": 0.35242823013256014,
      "learning_rate": 1.132462954246688e-05,
      "loss": 0.3212,
      "step": 115
    },
    {
      "epoch": 3.546835443037975,
      "grad_norm": 0.35770624011974284,
      "learning_rate": 1.1169553219720828e-05,
      "loss": 0.3126,
      "step": 116
    },
    {
      "epoch": 3.5772151898734177,
      "grad_norm": 0.3131897689300393,
      "learning_rate": 1.1014191191358118e-05,
      "loss": 0.3263,
      "step": 117
    },
    {
      "epoch": 3.607594936708861,
      "grad_norm": 0.31506231194363454,
      "learning_rate": 1.085858141016566e-05,
      "loss": 0.3194,
      "step": 118
    },
    {
      "epoch": 3.6379746835443036,
      "grad_norm": 0.3300433520032172,
      "learning_rate": 1.070276188945293e-05,
      "loss": 0.3277,
      "step": 119
    },
    {
      "epoch": 3.668354430379747,
      "grad_norm": 0.35857960912106696,
      "learning_rate": 1.0546770693765859e-05,
      "loss": 0.339,
      "step": 120
    },
    {
      "epoch": 3.69873417721519,
      "grad_norm": 0.3472575557331752,
      "learning_rate": 1.0390645929588197e-05,
      "loss": 0.3265,
      "step": 121
    },
    {
      "epoch": 3.729113924050633,
      "grad_norm": 0.3593221910905371,
      "learning_rate": 1.0234425736032607e-05,
      "loss": 0.3361,
      "step": 122
    },
    {
      "epoch": 3.759493670886076,
      "grad_norm": 0.35323858073043973,
      "learning_rate": 1.007814827552384e-05,
      "loss": 0.3197,
      "step": 123
    },
    {
      "epoch": 3.7898734177215188,
      "grad_norm": 0.3358027219446924,
      "learning_rate": 9.92185172447616e-06,
      "loss": 0.3371,
      "step": 124
    },
    {
      "epoch": 3.820253164556962,
      "grad_norm": 0.33462898097721216,
      "learning_rate": 9.765574263967397e-06,
      "loss": 0.3444,
      "step": 125
    },
    {
      "epoch": 3.850632911392405,
      "grad_norm": 0.3146369213691247,
      "learning_rate": 9.609354070411807e-06,
      "loss": 0.329,
      "step": 126
    },
    {
      "epoch": 3.8810126582278484,
      "grad_norm": 0.3424174883046021,
      "learning_rate": 9.453229306234143e-06,
      "loss": 0.3372,
      "step": 127
    },
    {
      "epoch": 3.911392405063291,
      "grad_norm": 0.2943707886567511,
      "learning_rate": 9.297238110547075e-06,
      "loss": 0.3402,
      "step": 128
    },
    {
      "epoch": 3.9417721518987343,
      "grad_norm": 0.35308049321203056,
      "learning_rate": 9.14141858983434e-06,
      "loss": 0.3327,
      "step": 129
    },
    {
      "epoch": 3.972151898734177,
      "grad_norm": 0.2794126776921179,
      "learning_rate": 8.985808808641883e-06,
      "loss": 0.3294,
      "step": 130
    },
    {
      "epoch": 4.010126582278481,
      "grad_norm": 0.34707540544255155,
      "learning_rate": 8.830446780279175e-06,
      "loss": 0.3366,
      "step": 131
    },
    {
      "epoch": 4.040506329113924,
      "grad_norm": 0.4310342590093157,
      "learning_rate": 8.675370457533122e-06,
      "loss": 0.2885,
      "step": 132
    },
    {
      "epoch": 4.0708860759493675,
      "grad_norm": 0.39223005192351007,
      "learning_rate": 8.520617723396702e-06,
      "loss": 0.3018,
      "step": 133
    },
    {
      "epoch": 4.10126582278481,
      "grad_norm": 0.5008029874507762,
      "learning_rate": 8.366226381814698e-06,
      "loss": 0.2917,
      "step": 134
    },
    {
      "epoch": 4.131645569620253,
      "grad_norm": 0.3717883851042529,
      "learning_rate": 8.212234148448708e-06,
      "loss": 0.2948,
      "step": 135
    },
    {
      "epoch": 4.162025316455696,
      "grad_norm": 0.48772070261352396,
      "learning_rate": 8.058678641463724e-06,
      "loss": 0.2856,
      "step": 136
    },
    {
      "epoch": 4.192405063291139,
      "grad_norm": 0.29990235187784275,
      "learning_rate": 7.905597372338558e-06,
      "loss": 0.299,
      "step": 137
    },
    {
      "epoch": 4.222784810126583,
      "grad_norm": 0.3646267826485481,
      "learning_rate": 7.753027736702283e-06,
      "loss": 0.283,
      "step": 138
    },
    {
      "epoch": 4.253164556962025,
      "grad_norm": 0.3749448960383453,
      "learning_rate": 7.601007005199022e-06,
      "loss": 0.2815,
      "step": 139
    },
    {
      "epoch": 4.283544303797468,
      "grad_norm": 0.29510288215437896,
      "learning_rate": 7.449572314383237e-06,
      "loss": 0.2778,
      "step": 140
    },
    {
      "epoch": 4.313924050632911,
      "grad_norm": 0.3421691653275206,
      "learning_rate": 7.298760657647779e-06,
      "loss": 0.2998,
      "step": 141
    },
    {
      "epoch": 4.3443037974683545,
      "grad_norm": 0.34243454860761985,
      "learning_rate": 7.148608876186931e-06,
      "loss": 0.2765,
      "step": 142
    },
    {
      "epoch": 4.374683544303798,
      "grad_norm": 0.3114696535262121,
      "learning_rate": 6.999153649996595e-06,
      "loss": 0.2821,
      "step": 143
    },
    {
      "epoch": 4.405063291139241,
      "grad_norm": 0.3663624838947982,
      "learning_rate": 6.8504314889138956e-06,
      "loss": 0.2919,
      "step": 144
    },
    {
      "epoch": 4.435443037974683,
      "grad_norm": 0.3094633405060417,
      "learning_rate": 6.702478723698336e-06,
      "loss": 0.2692,
      "step": 145
    },
    {
      "epoch": 4.465822784810126,
      "grad_norm": 0.30967837068990584,
      "learning_rate": 6.555331497156671e-06,
      "loss": 0.2862,
      "step": 146
    },
    {
      "epoch": 4.49620253164557,
      "grad_norm": 0.36674001741323564,
      "learning_rate": 6.4090257553137566e-06,
      "loss": 0.2867,
      "step": 147
    },
    {
      "epoch": 4.526582278481013,
      "grad_norm": 0.300696126518697,
      "learning_rate": 6.263597238631405e-06,
      "loss": 0.2931,
      "step": 148
    },
    {
      "epoch": 4.556962025316456,
      "grad_norm": 0.2969652346854883,
      "learning_rate": 6.119081473277502e-06,
      "loss": 0.2894,
      "step": 149
    },
    {
      "epoch": 4.587341772151898,
      "grad_norm": 0.31128084271919876,
      "learning_rate": 5.975513762447465e-06,
      "loss": 0.2891,
      "step": 150
    },
    {
      "epoch": 4.6177215189873415,
      "grad_norm": 0.29739493327330657,
      "learning_rate": 5.832929177740134e-06,
      "loss": 0.2857,
      "step": 151
    },
    {
      "epoch": 4.648101265822785,
      "grad_norm": 0.2867071618954023,
      "learning_rate": 5.6913625505902966e-06,
      "loss": 0.2928,
      "step": 152
    },
    {
      "epoch": 4.678481012658228,
      "grad_norm": 0.2886542058623698,
      "learning_rate": 5.550848463759835e-06,
      "loss": 0.2762,
      "step": 153
    },
    {
      "epoch": 4.708860759493671,
      "grad_norm": 0.2909709652558278,
      "learning_rate": 5.411421242889643e-06,
      "loss": 0.2836,
      "step": 154
    },
    {
      "epoch": 4.739240506329114,
      "grad_norm": 0.28437330504528613,
      "learning_rate": 5.273114948114346e-06,
      "loss": 0.273,
      "step": 155
    },
    {
      "epoch": 4.769620253164557,
      "grad_norm": 0.2639747513769549,
      "learning_rate": 5.135963365741892e-06,
      "loss": 0.2667,
      "step": 156
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.29796444837888897,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.2814,
      "step": 157
    },
    {
      "epoch": 4.830379746835443,
      "grad_norm": 0.304854313462495,
      "learning_rate": 4.865258064851579e-06,
      "loss": 0.2823,
      "step": 158
    },
    {
      "epoch": 4.860759493670886,
      "grad_norm": 0.30257719321842363,
      "learning_rate": 4.731770475880995e-06,
      "loss": 0.2819,
      "step": 159
    },
    {
      "epoch": 4.891139240506329,
      "grad_norm": 0.2996911970589863,
      "learning_rate": 4.599569842253244e-06,
      "loss": 0.2869,
      "step": 160
    },
    {
      "epoch": 4.921518987341772,
      "grad_norm": 0.29799634956925103,
      "learning_rate": 4.468688458748006e-06,
      "loss": 0.2866,
      "step": 161
    },
    {
      "epoch": 4.951898734177215,
      "grad_norm": 0.29617907360825707,
      "learning_rate": 4.339158297870469e-06,
      "loss": 0.2728,
      "step": 162
    },
    {
      "epoch": 4.982278481012658,
      "grad_norm": 0.29056554075558516,
      "learning_rate": 4.211011002040885e-06,
      "loss": 0.2997,
      "step": 163
    },
    {
      "epoch": 5.020253164556962,
      "grad_norm": 0.3357348394161449,
      "learning_rate": 4.084277875864776e-06,
      "loss": 0.2409,
      "step": 164
    },
    {
      "epoch": 5.050632911392405,
      "grad_norm": 0.3261604322472783,
      "learning_rate": 3.958989878485644e-06,
      "loss": 0.2536,
      "step": 165
    },
    {
      "epoch": 5.0810126582278485,
      "grad_norm": 0.2808339100023715,
      "learning_rate": 3.83517761602209e-06,
      "loss": 0.2539,
      "step": 166
    },
    {
      "epoch": 5.111392405063291,
      "grad_norm": 0.40913027095598886,
      "learning_rate": 3.712871334091154e-06,
      "loss": 0.2617,
      "step": 167
    },
    {
      "epoch": 5.141772151898734,
      "grad_norm": 0.3918467633543267,
      "learning_rate": 3.592100910419738e-06,
      "loss": 0.2438,
      "step": 168
    },
    {
      "epoch": 5.172151898734177,
      "grad_norm": 0.3107419405776118,
      "learning_rate": 3.4728958475459052e-06,
      "loss": 0.2545,
      "step": 169
    },
    {
      "epoch": 5.2025316455696204,
      "grad_norm": 0.34048246278427013,
      "learning_rate": 3.355285265611784e-06,
      "loss": 0.2483,
      "step": 170
    },
    {
      "epoch": 5.232911392405064,
      "grad_norm": 0.3639824877961244,
      "learning_rate": 3.2392978952499553e-06,
      "loss": 0.2427,
      "step": 171
    },
    {
      "epoch": 5.263291139240506,
      "grad_norm": 0.2715170371452864,
      "learning_rate": 3.1249620705649417e-06,
      "loss": 0.249,
      "step": 172
    },
    {
      "epoch": 5.293670886075949,
      "grad_norm": 0.284255183952654,
      "learning_rate": 3.0123057222115835e-06,
      "loss": 0.2598,
      "step": 173
    },
    {
      "epoch": 5.324050632911392,
      "grad_norm": 0.33462942711759586,
      "learning_rate": 2.9013563705719673e-06,
      "loss": 0.2496,
      "step": 174
    },
    {
      "epoch": 5.3544303797468356,
      "grad_norm": 0.31106129770475943,
      "learning_rate": 2.7921411190325753e-06,
      "loss": 0.2541,
      "step": 175
    },
    {
      "epoch": 5.384810126582279,
      "grad_norm": 0.2803838122944453,
      "learning_rate": 2.6846866473633126e-06,
      "loss": 0.2406,
      "step": 176
    },
    {
      "epoch": 5.415189873417722,
      "grad_norm": 0.276815621879905,
      "learning_rate": 2.579019205199992e-06,
      "loss": 0.2406,
      "step": 177
    },
    {
      "epoch": 5.445569620253164,
      "grad_norm": 0.29344147429774503,
      "learning_rate": 2.4751646056319334e-06,
      "loss": 0.2547,
      "step": 178
    },
    {
      "epoch": 5.4759493670886075,
      "grad_norm": 0.2659294541117492,
      "learning_rate": 2.373148218896182e-06,
      "loss": 0.2519,
      "step": 179
    },
    {
      "epoch": 5.506329113924051,
      "grad_norm": 0.2842720165003485,
      "learning_rate": 2.2729949661798876e-06,
      "loss": 0.2492,
      "step": 180
    },
    {
      "epoch": 5.536708860759494,
      "grad_norm": 0.2588045078733543,
      "learning_rate": 2.174729313532433e-06,
      "loss": 0.2398,
      "step": 181
    },
    {
      "epoch": 5.567088607594937,
      "grad_norm": 0.28715393265858163,
      "learning_rate": 2.078375265888707e-06,
      "loss": 0.2553,
      "step": 182
    },
    {
      "epoch": 5.597468354430379,
      "grad_norm": 0.28745554828704595,
      "learning_rate": 1.9839563612050273e-06,
      "loss": 0.2471,
      "step": 183
    },
    {
      "epoch": 5.627848101265823,
      "grad_norm": 0.2899226041664699,
      "learning_rate": 1.8914956647091497e-06,
      "loss": 0.2481,
      "step": 184
    },
    {
      "epoch": 5.658227848101266,
      "grad_norm": 0.2590067155318091,
      "learning_rate": 1.8010157632657544e-06,
      "loss": 0.2675,
      "step": 185
    },
    {
      "epoch": 5.688607594936709,
      "grad_norm": 0.25687756224345437,
      "learning_rate": 1.7125387598587862e-06,
      "loss": 0.2535,
      "step": 186
    },
    {
      "epoch": 5.718987341772152,
      "grad_norm": 0.2595077909838717,
      "learning_rate": 1.6260862681919965e-06,
      "loss": 0.2547,
      "step": 187
    },
    {
      "epoch": 5.749367088607595,
      "grad_norm": 0.2689692147004909,
      "learning_rate": 1.5416794074090258e-06,
      "loss": 0.2508,
      "step": 188
    },
    {
      "epoch": 5.779746835443038,
      "grad_norm": 0.26248679544653875,
      "learning_rate": 1.459338796934293e-06,
      "loss": 0.2527,
      "step": 189
    },
    {
      "epoch": 5.810126582278481,
      "grad_norm": 0.2531892479672906,
      "learning_rate": 1.3790845514359363e-06,
      "loss": 0.2558,
      "step": 190
    },
    {
      "epoch": 5.840506329113924,
      "grad_norm": 0.24298820143174016,
      "learning_rate": 1.300936275912098e-06,
      "loss": 0.2535,
      "step": 191
    },
    {
      "epoch": 5.870886075949367,
      "grad_norm": 0.23720099457299162,
      "learning_rate": 1.224913060901688e-06,
      "loss": 0.2516,
      "step": 192
    },
    {
      "epoch": 5.9012658227848105,
      "grad_norm": 0.2478735432227225,
      "learning_rate": 1.1510334778208332e-06,
      "loss": 0.2643,
      "step": 193
    },
    {
      "epoch": 5.931645569620253,
      "grad_norm": 0.2512383888339122,
      "learning_rate": 1.0793155744261352e-06,
      "loss": 0.2604,
      "step": 194
    },
    {
      "epoch": 5.962025316455696,
      "grad_norm": 0.24730027387913064,
      "learning_rate": 1.0097768704058542e-06,
      "loss": 0.2662,
      "step": 195
    },
    {
      "epoch": 5.992405063291139,
      "grad_norm": 0.24325883427668096,
      "learning_rate": 9.424343531000968e-07,
      "loss": 0.2403,
      "step": 196
    },
    {
      "epoch": 6.030379746835443,
      "grad_norm": 0.31211263509749093,
      "learning_rate": 8.773044733510338e-07,
      "loss": 0.2362,
      "step": 197
    },
    {
      "epoch": 6.060759493670886,
      "grad_norm": 0.2737512885730665,
      "learning_rate": 8.144031414842012e-07,
      "loss": 0.2429,
      "step": 198
    },
    {
      "epoch": 6.091139240506329,
      "grad_norm": 0.25911659980255664,
      "learning_rate": 7.537457234218271e-07,
      "loss": 0.2328,
      "step": 199
    },
    {
      "epoch": 6.121518987341772,
      "grad_norm": 0.23760901390554406,
      "learning_rate": 6.953470369291349e-07,
      "loss": 0.2306,
      "step": 200
    },
    {
      "epoch": 6.151898734177215,
      "grad_norm": 0.23753911038915718,
      "learning_rate": 6.392213479945852e-07,
      "loss": 0.2389,
      "step": 201
    },
    {
      "epoch": 6.182278481012658,
      "grad_norm": 0.23552145755488546,
      "learning_rate": 5.853823673448877e-07,
      "loss": 0.2388,
      "step": 202
    },
    {
      "epoch": 6.2126582278481015,
      "grad_norm": 0.2442014087456443,
      "learning_rate": 5.33843247095659e-07,
      "loss": 0.221,
      "step": 203
    },
    {
      "epoch": 6.243037974683545,
      "grad_norm": 0.2696456418903012,
      "learning_rate": 4.846165775385459e-07,
      "loss": 0.2452,
      "step": 204
    },
    {
      "epoch": 6.273417721518987,
      "grad_norm": 0.2811169402953374,
      "learning_rate": 4.3771438406559173e-07,
      "loss": 0.2407,
      "step": 205
    },
    {
      "epoch": 6.30379746835443,
      "grad_norm": 0.2484087773457379,
      "learning_rate": 3.931481242315993e-07,
      "loss": 0.2414,
      "step": 206
    },
    {
      "epoch": 6.334177215189873,
      "grad_norm": 0.26911811496038707,
      "learning_rate": 3.5092868495520294e-07,
      "loss": 0.2494,
      "step": 207
    },
    {
      "epoch": 6.364556962025317,
      "grad_norm": 0.23156443461752707,
      "learning_rate": 3.110663798593616e-07,
      "loss": 0.2438,
      "step": 208
    },
    {
      "epoch": 6.39493670886076,
      "grad_norm": 0.22517950714907936,
      "learning_rate": 2.735709467518699e-07,
      "loss": 0.2355,
      "step": 209
    },
    {
      "epoch": 6.425316455696202,
      "grad_norm": 0.23805252833253757,
      "learning_rate": 2.384515452465475e-07,
      "loss": 0.2342,
      "step": 210
    },
    {
      "epoch": 6.455696202531645,
      "grad_norm": 0.22792456827183868,
      "learning_rate": 2.0571675452567997e-07,
      "loss": 0.2454,
      "step": 211
    },
    {
      "epoch": 6.4860759493670885,
      "grad_norm": 0.2251059123835004,
      "learning_rate": 1.7537457124423896e-07,
      "loss": 0.2404,
      "step": 212
    },
    {
      "epoch": 6.516455696202532,
      "grad_norm": 0.2310710916460582,
      "learning_rate": 1.474324075764111e-07,
      "loss": 0.2467,
      "step": 213
    },
    {
      "epoch": 6.546835443037975,
      "grad_norm": 0.2356748861565607,
      "learning_rate": 1.2189708940490653e-07,
      "loss": 0.2456,
      "step": 214
    },
    {
      "epoch": 6.577215189873417,
      "grad_norm": 0.24523700563100917,
      "learning_rate": 9.877485465349057e-08,
      "loss": 0.2296,
      "step": 215
    },
    {
      "epoch": 6.6075949367088604,
      "grad_norm": 0.22877897634830702,
      "learning_rate": 7.807135176314707e-08,
      "loss": 0.2424,
      "step": 216
    },
    {
      "epoch": 6.637974683544304,
      "grad_norm": 0.22596205092905755,
      "learning_rate": 5.979163831223988e-08,
      "loss": 0.2435,
      "step": 217
    },
    {
      "epoch": 6.668354430379747,
      "grad_norm": 0.22901270273269697,
      "learning_rate": 4.394017978101905e-08,
      "loss": 0.231,
      "step": 218
    },
    {
      "epoch": 6.69873417721519,
      "grad_norm": 0.23549751073449784,
      "learning_rate": 3.0520848460765525e-08,
      "loss": 0.226,
      "step": 219
    },
    {
      "epoch": 6.729113924050633,
      "grad_norm": 0.2167704625820124,
      "learning_rate": 1.9536922507841227e-08,
      "loss": 0.2313,
      "step": 220
    },
    {
      "epoch": 6.759493670886076,
      "grad_norm": 0.21833223668838464,
      "learning_rate": 1.099108514288627e-08,
      "loss": 0.2441,
      "step": 221
    },
    {
      "epoch": 6.789873417721519,
      "grad_norm": 0.233277070307917,
      "learning_rate": 4.885423995341088e-09,
      "loss": 0.2409,
      "step": 222
    },
    {
      "epoch": 6.820253164556962,
      "grad_norm": 0.23319931748778974,
      "learning_rate": 1.2214305934699078e-09,
      "loss": 0.2305,
      "step": 223
    },
    {
      "epoch": 6.850632911392405,
      "grad_norm": 0.22865911802144936,
      "learning_rate": 0.0,
      "loss": 0.2344,
      "step": 224
    },
    {
      "epoch": 6.850632911392405,
      "step": 224,
      "total_flos": 8.149472647147684e+17,
      "train_loss": 0.3728780325369111,
      "train_runtime": 25402.5047,
      "train_samples_per_second": 0.871,
      "train_steps_per_second": 0.009
    }
  ],
  "logging_steps": 1,
  "max_steps": 224,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.149472647147684e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}