{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 252.32,
  "eval_steps": 500,
  "global_step": 3280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8,
      "grad_norm": 79.95787811279297,
      "learning_rate": 1.2e-05,
      "loss": 10.8172,
      "step": 10
    },
    {
      "epoch": 1.56,
      "grad_norm": 15.179234504699707,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.4707,
      "step": 20
    },
    {
      "epoch": 2.32,
      "grad_norm": 5.771359443664551,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 0.8206,
      "step": 30
    },
    {
      "epoch": 3.08,
      "grad_norm": 5.559211730957031,
      "learning_rate": 7.2e-05,
      "loss": 0.9515,
      "step": 40
    },
    {
      "epoch": 3.88,
      "grad_norm": 2.7674646377563477,
      "learning_rate": 9.200000000000001e-05,
      "loss": 0.7728,
      "step": 50
    },
    {
      "epoch": 4.64,
      "grad_norm": 3.8835511207580566,
      "learning_rate": 9.994979079497908e-05,
      "loss": 0.8743,
      "step": 60
    },
    {
      "epoch": 5.4,
      "grad_norm": 2.758652925491333,
      "learning_rate": 9.986610878661087e-05,
      "loss": 0.6475,
      "step": 70
    },
    {
      "epoch": 6.16,
      "grad_norm": 1.3047797679901123,
      "learning_rate": 9.978242677824268e-05,
      "loss": 0.7883,
      "step": 80
    },
    {
      "epoch": 6.96,
      "grad_norm": 3.255369186401367,
      "learning_rate": 9.969874476987448e-05,
      "loss": 2.3198,
      "step": 90
    },
    {
      "epoch": 7.72,
      "grad_norm": 2.8563857078552246,
      "learning_rate": 9.961506276150628e-05,
      "loss": 0.6541,
      "step": 100
    },
    {
      "epoch": 8.48,
      "grad_norm": 1.9255300760269165,
      "learning_rate": 9.953138075313808e-05,
      "loss": 0.7187,
      "step": 110
    },
    {
      "epoch": 9.24,
      "grad_norm": 2.496511697769165,
      "learning_rate": 9.944769874476987e-05,
      "loss": 0.6086,
      "step": 120
    },
    {
      "epoch": 10.0,
      "grad_norm": 1.0335757732391357,
      "learning_rate": 9.936401673640167e-05,
      "loss": 0.5804,
      "step": 130
    },
    {
      "epoch": 10.8,
      "grad_norm": 2.37092661857605,
      "learning_rate": 9.928033472803347e-05,
      "loss": 0.4609,
      "step": 140
    },
    {
      "epoch": 11.56,
      "grad_norm": 5.012497901916504,
      "learning_rate": 9.919665271966527e-05,
      "loss": 0.4665,
      "step": 150
    },
    {
      "epoch": 12.32,
      "grad_norm": 1.2357938289642334,
      "learning_rate": 9.911297071129707e-05,
      "loss": 0.541,
      "step": 160
    },
    {
      "epoch": 13.08,
      "grad_norm": 3.381162405014038,
      "learning_rate": 9.902928870292888e-05,
      "loss": 0.5019,
      "step": 170
    },
    {
      "epoch": 13.88,
      "grad_norm": 1.603922724723816,
      "learning_rate": 9.894560669456067e-05,
      "loss": 0.408,
      "step": 180
    },
    {
      "epoch": 14.64,
      "grad_norm": 1.6547144651412964,
      "learning_rate": 9.886192468619247e-05,
      "loss": 0.3947,
      "step": 190
    },
    {
      "epoch": 15.4,
      "grad_norm": 0.8650034070014954,
      "learning_rate": 9.877824267782427e-05,
      "loss": 0.593,
      "step": 200
    },
    {
      "epoch": 16.16,
      "grad_norm": 0.9181660413742065,
      "learning_rate": 9.869456066945607e-05,
      "loss": 0.3337,
      "step": 210
    },
    {
      "epoch": 16.96,
      "grad_norm": 1.2371879816055298,
      "learning_rate": 9.861087866108786e-05,
      "loss": 0.4019,
      "step": 220
    },
    {
      "epoch": 17.72,
      "grad_norm": 1.0304924249649048,
      "learning_rate": 9.852719665271966e-05,
      "loss": 0.3883,
      "step": 230
    },
    {
      "epoch": 18.48,
      "grad_norm": 1.2461388111114502,
      "learning_rate": 9.844351464435146e-05,
      "loss": 0.6623,
      "step": 240
    },
    {
      "epoch": 19.24,
      "grad_norm": 0.6923120617866516,
      "learning_rate": 9.835983263598327e-05,
      "loss": 0.3604,
      "step": 250
    },
    {
      "epoch": 20.0,
      "grad_norm": 4.839296817779541,
      "learning_rate": 9.827615062761507e-05,
      "loss": 0.387,
      "step": 260
    },
    {
      "epoch": 20.8,
      "grad_norm": 1.1753798723220825,
      "learning_rate": 9.819246861924687e-05,
      "loss": 0.3514,
      "step": 270
    },
    {
      "epoch": 21.56,
      "grad_norm": 0.9725382924079895,
      "learning_rate": 9.810878661087866e-05,
      "loss": 0.3922,
      "step": 280
    },
    {
      "epoch": 22.32,
      "grad_norm": 0.645702600479126,
      "learning_rate": 9.802510460251046e-05,
      "loss": 0.3331,
      "step": 290
    },
    {
      "epoch": 23.08,
      "grad_norm": 1.4330626726150513,
      "learning_rate": 9.794142259414226e-05,
      "loss": 0.3946,
      "step": 300
    },
    {
      "epoch": 23.88,
      "grad_norm": 1.2405084371566772,
      "learning_rate": 9.785774058577406e-05,
      "loss": 0.354,
      "step": 310
    },
    {
      "epoch": 24.64,
      "grad_norm": 0.9036368131637573,
      "learning_rate": 9.777405857740585e-05,
      "loss": 0.3856,
      "step": 320
    },
    {
      "epoch": 25.4,
      "grad_norm": 0.7258665561676025,
      "learning_rate": 9.769037656903767e-05,
      "loss": 0.2859,
      "step": 330
    },
    {
      "epoch": 26.16,
      "grad_norm": 0.4196911156177521,
      "learning_rate": 9.760669456066946e-05,
      "loss": 0.2962,
      "step": 340
    },
    {
      "epoch": 26.96,
      "grad_norm": 1.2342430353164673,
      "learning_rate": 9.752301255230126e-05,
      "loss": 0.3721,
      "step": 350
    },
    {
      "epoch": 27.72,
      "grad_norm": 0.6905648112297058,
      "learning_rate": 9.743933054393306e-05,
      "loss": 0.2722,
      "step": 360
    },
    {
      "epoch": 28.48,
      "grad_norm": 0.851387083530426,
      "learning_rate": 9.735564853556486e-05,
      "loss": 0.3564,
      "step": 370
    },
    {
      "epoch": 29.24,
      "grad_norm": 0.5951876640319824,
      "learning_rate": 9.727196652719665e-05,
      "loss": 0.2892,
      "step": 380
    },
    {
      "epoch": 30.0,
      "grad_norm": 0.5764455199241638,
      "learning_rate": 9.718828451882845e-05,
      "loss": 0.3402,
      "step": 390
    },
    {
      "epoch": 30.8,
      "grad_norm": 7.990225315093994,
      "learning_rate": 9.710460251046025e-05,
      "loss": 0.3439,
      "step": 400
    },
    {
      "epoch": 31.56,
      "grad_norm": 1.119363784790039,
      "learning_rate": 9.702092050209205e-05,
      "loss": 0.2595,
      "step": 410
    },
    {
      "epoch": 32.32,
      "grad_norm": 27.885967254638672,
      "learning_rate": 9.693723849372386e-05,
      "loss": 0.9194,
      "step": 420
    },
    {
      "epoch": 33.08,
      "grad_norm": 5.194603443145752,
      "learning_rate": 9.685355648535566e-05,
      "loss": 0.3321,
      "step": 430
    },
    {
      "epoch": 33.88,
      "grad_norm": 2.1785478591918945,
      "learning_rate": 9.676987447698745e-05,
      "loss": 0.3875,
      "step": 440
    },
    {
      "epoch": 34.64,
      "grad_norm": 0.844071626663208,
      "learning_rate": 9.668619246861925e-05,
      "loss": 0.2458,
      "step": 450
    },
    {
      "epoch": 35.4,
      "grad_norm": 2.1278724670410156,
      "learning_rate": 9.660251046025105e-05,
      "loss": 0.358,
      "step": 460
    },
    {
      "epoch": 36.16,
      "grad_norm": 0.6194890141487122,
      "learning_rate": 9.651882845188285e-05,
      "loss": 0.3334,
      "step": 470
    },
    {
      "epoch": 36.96,
      "grad_norm": 0.7329260110855103,
      "learning_rate": 9.643514644351464e-05,
      "loss": 0.2981,
      "step": 480
    },
    {
      "epoch": 37.72,
      "grad_norm": 0.8790725469589233,
      "learning_rate": 9.635146443514644e-05,
      "loss": 0.2903,
      "step": 490
    },
    {
      "epoch": 38.48,
      "grad_norm": 0.5892160534858704,
      "learning_rate": 9.626778242677825e-05,
      "loss": 0.362,
      "step": 500
    },
    {
      "epoch": 39.24,
      "grad_norm": 1.6055381298065186,
      "learning_rate": 9.618410041841005e-05,
      "loss": 0.3838,
      "step": 510
    },
    {
      "epoch": 40.0,
      "grad_norm": 11.784494400024414,
      "learning_rate": 9.610041841004185e-05,
      "loss": 0.3082,
      "step": 520
    },
    {
      "epoch": 40.8,
      "grad_norm": 0.5743730664253235,
      "learning_rate": 9.601673640167365e-05,
      "loss": 0.31,
      "step": 530
    },
    {
      "epoch": 41.56,
      "grad_norm": 0.592658519744873,
      "learning_rate": 9.593305439330544e-05,
      "loss": 0.5208,
      "step": 540
    },
    {
      "epoch": 42.32,
      "grad_norm": 5.394102573394775,
      "learning_rate": 9.584937238493724e-05,
      "loss": 0.4108,
      "step": 550
    },
    {
      "epoch": 43.08,
      "grad_norm": 0.7347181439399719,
      "learning_rate": 9.576569037656904e-05,
      "loss": 0.3476,
      "step": 560
    },
    {
      "epoch": 43.88,
      "grad_norm": 0.9459385871887207,
      "learning_rate": 9.568200836820084e-05,
      "loss": 0.3008,
      "step": 570
    },
    {
      "epoch": 44.64,
      "grad_norm": 5.524572849273682,
      "learning_rate": 9.559832635983263e-05,
      "loss": 0.266,
      "step": 580
    },
    {
      "epoch": 45.4,
      "grad_norm": 0.705575704574585,
      "learning_rate": 9.551464435146445e-05,
      "loss": 0.3705,
      "step": 590
    },
    {
      "epoch": 46.16,
      "grad_norm": 0.5305906534194946,
      "learning_rate": 9.543096234309624e-05,
      "loss": 0.2619,
      "step": 600
    },
    {
      "epoch": 46.96,
      "grad_norm": 3.7031350135803223,
      "learning_rate": 9.534728033472804e-05,
      "loss": 0.4041,
      "step": 610
    },
    {
      "epoch": 47.72,
      "grad_norm": 0.9455975294113159,
      "learning_rate": 9.526359832635984e-05,
      "loss": 0.2948,
      "step": 620
    },
    {
      "epoch": 48.48,
      "grad_norm": 0.13628531992435455,
      "learning_rate": 9.517991631799164e-05,
      "loss": 0.2962,
      "step": 630
    },
    {
      "epoch": 49.24,
      "grad_norm": 0.5263031721115112,
      "learning_rate": 9.509623430962343e-05,
      "loss": 0.3395,
      "step": 640
    },
    {
      "epoch": 50.0,
      "grad_norm": 0.47290968894958496,
      "learning_rate": 9.501255230125523e-05,
      "loss": 0.3163,
      "step": 650
    },
    {
      "epoch": 50.8,
      "grad_norm": 0.7216980457305908,
      "learning_rate": 9.492887029288703e-05,
      "loss": 0.303,
      "step": 660
    },
    {
      "epoch": 51.56,
      "grad_norm": 0.44606828689575195,
      "learning_rate": 9.484518828451884e-05,
      "loss": 0.3086,
      "step": 670
    },
    {
      "epoch": 52.32,
      "grad_norm": 0.6489527821540833,
      "learning_rate": 9.476150627615064e-05,
      "loss": 0.3226,
      "step": 680
    },
    {
      "epoch": 53.08,
      "grad_norm": 0.4259757101535797,
      "learning_rate": 9.467782426778243e-05,
      "loss": 0.2513,
      "step": 690
    },
    {
      "epoch": 53.88,
      "grad_norm": 0.5952382683753967,
      "learning_rate": 9.459414225941423e-05,
      "loss": 0.3113,
      "step": 700
    },
    {
      "epoch": 54.64,
      "grad_norm": 1.0894474983215332,
      "learning_rate": 9.451046025104603e-05,
      "loss": 0.2583,
      "step": 710
    },
    {
      "epoch": 55.4,
      "grad_norm": 0.5647149085998535,
      "learning_rate": 9.442677824267783e-05,
      "loss": 0.3042,
      "step": 720
    },
    {
      "epoch": 56.16,
      "grad_norm": 0.9288455843925476,
      "learning_rate": 9.434309623430963e-05,
      "loss": 0.3281,
      "step": 730
    },
    {
      "epoch": 56.96,
      "grad_norm": 0.4492562711238861,
      "learning_rate": 9.425941422594142e-05,
      "loss": 0.3379,
      "step": 740
    },
    {
      "epoch": 57.72,
      "grad_norm": 1.0189441442489624,
      "learning_rate": 9.417573221757323e-05,
      "loss": 0.3194,
      "step": 750
    },
    {
      "epoch": 58.48,
      "grad_norm": 0.40981024503707886,
      "learning_rate": 9.409205020920503e-05,
      "loss": 0.2423,
      "step": 760
    },
    {
      "epoch": 59.24,
      "grad_norm": 0.354190468788147,
      "learning_rate": 9.400836820083683e-05,
      "loss": 0.3266,
      "step": 770
    },
    {
      "epoch": 60.0,
      "grad_norm": 0.4593465328216553,
      "learning_rate": 9.392468619246863e-05,
      "loss": 0.3068,
      "step": 780
    },
    {
      "epoch": 60.8,
      "grad_norm": 0.759248673915863,
      "learning_rate": 9.384100418410042e-05,
      "loss": 0.3078,
      "step": 790
    },
    {
      "epoch": 61.56,
      "grad_norm": 0.8159565925598145,
      "learning_rate": 9.375732217573222e-05,
      "loss": 0.2668,
      "step": 800
    },
    {
      "epoch": 62.32,
      "grad_norm": 0.7888874411582947,
      "learning_rate": 9.367364016736402e-05,
      "loss": 0.2936,
      "step": 810
    },
    {
      "epoch": 63.08,
      "grad_norm": 0.8634017705917358,
      "learning_rate": 9.358995815899582e-05,
      "loss": 0.3182,
      "step": 820
    },
    {
      "epoch": 63.88,
      "grad_norm": 0.5454868078231812,
      "learning_rate": 9.350627615062762e-05,
      "loss": 0.3117,
      "step": 830
    },
    {
      "epoch": 64.64,
      "grad_norm": 0.9895418286323547,
      "learning_rate": 9.342259414225943e-05,
      "loss": 0.3014,
      "step": 840
    },
    {
      "epoch": 65.4,
      "grad_norm": 1.4120994806289673,
      "learning_rate": 9.333891213389122e-05,
      "loss": 0.3001,
      "step": 850
    },
    {
      "epoch": 66.16,
      "grad_norm": 0.6670629978179932,
      "learning_rate": 9.325523012552302e-05,
      "loss": 0.2651,
      "step": 860
    },
    {
      "epoch": 66.96,
      "grad_norm": 0.6429235339164734,
      "learning_rate": 9.317154811715482e-05,
      "loss": 0.2935,
      "step": 870
    },
    {
      "epoch": 67.72,
      "grad_norm": 0.6127046942710876,
      "learning_rate": 9.308786610878662e-05,
      "loss": 0.2936,
      "step": 880
    },
    {
      "epoch": 68.48,
      "grad_norm": 0.6747534275054932,
      "learning_rate": 9.300418410041841e-05,
      "loss": 0.2557,
      "step": 890
    },
    {
      "epoch": 69.24,
      "grad_norm": 0.8579817414283752,
      "learning_rate": 9.292050209205021e-05,
      "loss": 0.3045,
      "step": 900
    },
    {
      "epoch": 70.0,
      "grad_norm": 0.24220693111419678,
      "learning_rate": 9.283682008368201e-05,
      "loss": 0.2818,
      "step": 910
    },
    {
      "epoch": 70.8,
      "grad_norm": 0.5585035681724548,
      "learning_rate": 9.275313807531382e-05,
      "loss": 0.2579,
      "step": 920
    },
    {
      "epoch": 71.56,
      "grad_norm": 0.5965076684951782,
      "learning_rate": 9.266945606694562e-05,
      "loss": 0.3582,
      "step": 930
    },
    {
      "epoch": 72.32,
      "grad_norm": 0.4647338092327118,
      "learning_rate": 9.258577405857742e-05,
      "loss": 0.2384,
      "step": 940
    },
    {
      "epoch": 73.08,
      "grad_norm": 0.4868353605270386,
      "learning_rate": 9.250209205020921e-05,
      "loss": 0.3097,
      "step": 950
    },
    {
      "epoch": 73.88,
      "grad_norm": 0.4390128552913666,
      "learning_rate": 9.241841004184101e-05,
      "loss": 0.2722,
      "step": 960
    },
    {
      "epoch": 74.64,
      "grad_norm": 1.2558406591415405,
      "learning_rate": 9.233472803347281e-05,
      "loss": 0.2546,
      "step": 970
    },
    {
      "epoch": 75.4,
      "grad_norm": 1.468024492263794,
      "learning_rate": 9.225104602510461e-05,
      "loss": 0.3539,
      "step": 980
    },
    {
      "epoch": 76.16,
      "grad_norm": 1.3816922903060913,
      "learning_rate": 9.21673640167364e-05,
      "loss": 0.3131,
      "step": 990
    },
    {
      "epoch": 76.96,
      "grad_norm": 1.0851378440856934,
      "learning_rate": 9.208368200836822e-05,
      "loss": 0.3418,
      "step": 1000
    },
    {
      "epoch": 77.72,
      "grad_norm": 0.6077636480331421,
      "learning_rate": 9.200000000000001e-05,
      "loss": 0.301,
      "step": 1010
    },
    {
      "epoch": 78.48,
      "grad_norm": 0.39989063143730164,
      "learning_rate": 9.191631799163181e-05,
      "loss": 0.2551,
      "step": 1020
    },
    {
      "epoch": 79.24,
      "grad_norm": 0.5581826567649841,
      "learning_rate": 9.183263598326361e-05,
      "loss": 0.2955,
      "step": 1030
    },
    {
      "epoch": 80.0,
      "grad_norm": 0.42415767908096313,
      "learning_rate": 9.17489539748954e-05,
      "loss": 0.3024,
      "step": 1040
    },
    {
      "epoch": 80.8,
      "grad_norm": 0.431111603975296,
      "learning_rate": 9.16652719665272e-05,
      "loss": 0.3108,
      "step": 1050
    },
    {
      "epoch": 81.56,
      "grad_norm": 0.45482027530670166,
      "learning_rate": 9.1581589958159e-05,
      "loss": 0.25,
      "step": 1060
    },
    {
      "epoch": 82.32,
      "grad_norm": 0.6043058633804321,
      "learning_rate": 9.14979079497908e-05,
      "loss": 0.3466,
      "step": 1070
    },
    {
      "epoch": 83.08,
      "grad_norm": 0.7600142955780029,
      "learning_rate": 9.14142259414226e-05,
      "loss": 0.2888,
      "step": 1080
    },
    {
      "epoch": 83.88,
      "grad_norm": 0.36540111899375916,
      "learning_rate": 9.133054393305441e-05,
      "loss": 0.2896,
      "step": 1090
    },
    {
      "epoch": 84.64,
      "grad_norm": 1.1366398334503174,
      "learning_rate": 9.12468619246862e-05,
      "loss": 0.3023,
      "step": 1100
    },
    {
      "epoch": 85.4,
      "grad_norm": 0.646086573600769,
      "learning_rate": 9.1163179916318e-05,
      "loss": 0.2462,
      "step": 1110
    },
    {
      "epoch": 86.16,
      "grad_norm": 0.6224349141120911,
      "learning_rate": 9.10794979079498e-05,
      "loss": 0.2741,
      "step": 1120
    },
    {
      "epoch": 86.96,
      "grad_norm": 0.8657971024513245,
      "learning_rate": 9.09958158995816e-05,
      "loss": 0.3016,
      "step": 1130
    },
    {
      "epoch": 87.72,
      "grad_norm": 0.86732017993927,
      "learning_rate": 9.09121338912134e-05,
      "loss": 0.263,
      "step": 1140
    },
    {
      "epoch": 88.48,
      "grad_norm": 0.8562549948692322,
      "learning_rate": 9.08284518828452e-05,
      "loss": 0.2726,
      "step": 1150
    },
    {
      "epoch": 89.24,
      "grad_norm": 0.5194992423057556,
      "learning_rate": 9.074476987447699e-05,
      "loss": 0.3161,
      "step": 1160
    },
    {
      "epoch": 90.0,
      "grad_norm": 0.3380357027053833,
      "learning_rate": 9.066108786610879e-05,
      "loss": 0.26,
      "step": 1170
    },
    {
      "epoch": 90.8,
      "grad_norm": 0.4834354519844055,
      "learning_rate": 9.057740585774059e-05,
      "loss": 0.2901,
      "step": 1180
    },
    {
      "epoch": 91.56,
      "grad_norm": 0.7634447813034058,
      "learning_rate": 9.04937238493724e-05,
      "loss": 0.2471,
      "step": 1190
    },
    {
      "epoch": 92.32,
      "grad_norm": 0.5605065822601318,
      "learning_rate": 9.04100418410042e-05,
      "loss": 0.3091,
      "step": 1200
    },
    {
      "epoch": 93.08,
      "grad_norm": 0.6867684721946716,
      "learning_rate": 9.0326359832636e-05,
      "loss": 0.292,
      "step": 1210
    },
    {
      "epoch": 93.88,
      "grad_norm": 0.5395390391349792,
      "learning_rate": 9.024267782426779e-05,
      "loss": 0.2727,
      "step": 1220
    },
    {
      "epoch": 94.64,
      "grad_norm": 0.8648020029067993,
      "learning_rate": 9.015899581589959e-05,
      "loss": 0.3273,
      "step": 1230
    },
    {
      "epoch": 95.4,
      "grad_norm": 0.5256586074829102,
      "learning_rate": 9.007531380753139e-05,
      "loss": 0.2611,
      "step": 1240
    },
    {
      "epoch": 96.16,
      "grad_norm": 0.726409375667572,
      "learning_rate": 8.999163179916318e-05,
      "loss": 0.2993,
      "step": 1250
    },
    {
      "epoch": 96.96,
      "grad_norm": 0.5897384285926819,
      "learning_rate": 8.990794979079498e-05,
      "loss": 0.2628,
      "step": 1260
    },
    {
      "epoch": 97.72,
      "grad_norm": 0.3650963306427002,
      "learning_rate": 8.982426778242678e-05,
      "loss": 0.2975,
      "step": 1270
    },
    {
      "epoch": 98.48,
      "grad_norm": 0.6548069715499878,
      "learning_rate": 8.974058577405858e-05,
      "loss": 0.2732,
      "step": 1280
    },
    {
      "epoch": 99.24,
      "grad_norm": 0.6239974498748779,
      "learning_rate": 8.965690376569037e-05,
      "loss": 0.3471,
      "step": 1290
    },
    {
      "epoch": 100.0,
      "grad_norm": 0.00010844325879588723,
      "learning_rate": 8.957322175732217e-05,
      "loss": 0.2865,
      "step": 1300
    },
    {
      "epoch": 100.8,
      "grad_norm": 55.954044342041016,
      "learning_rate": 8.94979079497908e-05,
      "loss": 2.0083,
      "step": 1310
    },
    {
      "epoch": 101.56,
      "grad_norm": 1.1408623456954956,
      "learning_rate": 8.94142259414226e-05,
      "loss": 0.7388,
      "step": 1320
    },
    {
      "epoch": 102.32,
      "grad_norm": 0.7127572298049927,
      "learning_rate": 8.93305439330544e-05,
      "loss": 0.2341,
      "step": 1330
    },
    {
      "epoch": 103.08,
      "grad_norm": 188.80079650878906,
      "learning_rate": 8.92468619246862e-05,
      "loss": 0.8824,
      "step": 1340
    },
    {
      "epoch": 103.88,
      "grad_norm": 22.78482437133789,
      "learning_rate": 8.9163179916318e-05,
      "loss": 0.6129,
      "step": 1350
    },
    {
      "epoch": 104.64,
      "grad_norm": 1.2376023530960083,
      "learning_rate": 8.90794979079498e-05,
      "loss": 0.4303,
      "step": 1360
    },
    {
      "epoch": 105.4,
      "grad_norm": 1.032791018486023,
      "learning_rate": 8.899581589958159e-05,
      "loss": 0.3326,
      "step": 1370
    },
    {
      "epoch": 106.16,
      "grad_norm": 0.6220753192901611,
      "learning_rate": 8.891213389121339e-05,
      "loss": 0.3165,
      "step": 1380
    },
    {
      "epoch": 106.96,
      "grad_norm": 0.9835271835327148,
      "learning_rate": 8.882845188284519e-05,
      "loss": 0.2899,
      "step": 1390
    },
    {
      "epoch": 107.72,
      "grad_norm": 0.4846683144569397,
      "learning_rate": 8.8744769874477e-05,
      "loss": 0.3234,
      "step": 1400
    },
    {
      "epoch": 108.48,
      "grad_norm": 42.94027328491211,
      "learning_rate": 8.86610878661088e-05,
      "loss": 0.2739,
      "step": 1410
    },
    {
      "epoch": 109.24,
      "grad_norm": 0.42791783809661865,
      "learning_rate": 8.857740585774059e-05,
      "loss": 0.3374,
      "step": 1420
    },
    {
      "epoch": 110.0,
      "grad_norm": 0.5355058312416077,
      "learning_rate": 8.849372384937239e-05,
      "loss": 0.2941,
      "step": 1430
    },
    {
      "epoch": 110.8,
      "grad_norm": 0.43855488300323486,
      "learning_rate": 8.841004184100419e-05,
      "loss": 0.254,
      "step": 1440
    },
    {
      "epoch": 111.56,
      "grad_norm": 0.6513474583625793,
      "learning_rate": 8.832635983263599e-05,
      "loss": 0.2992,
      "step": 1450
    },
    {
      "epoch": 112.32,
      "grad_norm": 0.5990163087844849,
      "learning_rate": 8.824267782426778e-05,
      "loss": 0.2838,
      "step": 1460
    },
    {
      "epoch": 113.08,
      "grad_norm": 0.5194154381752014,
      "learning_rate": 8.815899581589958e-05,
      "loss": 0.2896,
      "step": 1470
    },
    {
      "epoch": 113.88,
      "grad_norm": 0.90041583776474,
      "learning_rate": 8.807531380753139e-05,
      "loss": 0.2853,
      "step": 1480
    },
    {
      "epoch": 114.64,
      "grad_norm": 0.46713006496429443,
      "learning_rate": 8.799163179916319e-05,
      "loss": 0.341,
      "step": 1490
    },
    {
      "epoch": 115.4,
      "grad_norm": 1.0562089681625366,
      "learning_rate": 8.790794979079499e-05,
      "loss": 0.2485,
      "step": 1500
    },
    {
      "epoch": 116.16,
      "grad_norm": 0.7914270162582397,
      "learning_rate": 8.782426778242678e-05,
      "loss": 0.2931,
      "step": 1510
    },
    {
      "epoch": 116.96,
      "grad_norm": 0.7104222178459167,
      "learning_rate": 8.774058577405858e-05,
      "loss": 0.3166,
      "step": 1520
    },
    {
      "epoch": 117.72,
      "grad_norm": 0.6477789878845215,
      "learning_rate": 8.765690376569038e-05,
      "loss": 0.2712,
      "step": 1530
    },
    {
      "epoch": 118.48,
      "grad_norm": 0.2977544069290161,
      "learning_rate": 8.757322175732218e-05,
      "loss": 0.2537,
      "step": 1540
    },
    {
      "epoch": 119.24,
      "grad_norm": 0.6447045803070068,
      "learning_rate": 8.748953974895398e-05,
      "loss": 0.2848,
      "step": 1550
    },
    {
      "epoch": 120.0,
      "grad_norm": 0.4993550479412079,
      "learning_rate": 8.740585774058579e-05,
      "loss": 0.2712,
      "step": 1560
    },
    {
      "epoch": 120.8,
      "grad_norm": 0.28479063510894775,
      "learning_rate": 8.732217573221758e-05,
      "loss": 0.2969,
      "step": 1570
    },
    {
      "epoch": 121.56,
      "grad_norm": 0.7489855885505676,
      "learning_rate": 8.723849372384938e-05,
      "loss": 0.2512,
      "step": 1580
    },
    {
      "epoch": 122.32,
      "grad_norm": 0.6503575444221497,
      "learning_rate": 8.715481171548118e-05,
      "loss": 0.268,
      "step": 1590
    },
    {
      "epoch": 123.08,
      "grad_norm": 0.5870686769485474,
      "learning_rate": 8.707112970711298e-05,
      "loss": 0.302,
      "step": 1600
    },
    {
      "epoch": 123.88,
      "grad_norm": 0.8388033509254456,
      "learning_rate": 8.698744769874477e-05,
      "loss": 0.2784,
      "step": 1610
    },
    {
      "epoch": 124.64,
      "grad_norm": 0.7110853791236877,
      "learning_rate": 8.690376569037657e-05,
      "loss": 0.2576,
      "step": 1620
    },
    {
      "epoch": 125.4,
      "grad_norm": 0.6697489619255066,
      "learning_rate": 8.682008368200837e-05,
      "loss": 0.2863,
      "step": 1630
    },
    {
      "epoch": 126.16,
      "grad_norm": 0.6678580045700073,
      "learning_rate": 8.673640167364017e-05,
      "loss": 0.2945,
      "step": 1640
    },
    {
      "epoch": 126.96,
      "grad_norm": 0.5099469423294067,
      "learning_rate": 8.665271966527198e-05,
      "loss": 0.2744,
      "step": 1650
    },
    {
      "epoch": 127.72,
      "grad_norm": 0.8461157083511353,
      "learning_rate": 8.656903765690378e-05,
      "loss": 0.2881,
      "step": 1660
    },
    {
      "epoch": 128.48,
      "grad_norm": 0.5801335573196411,
      "learning_rate": 8.648535564853557e-05,
      "loss": 0.2743,
      "step": 1670
    },
    {
      "epoch": 129.24,
      "grad_norm": 0.00013900638441555202,
      "learning_rate": 8.640167364016737e-05,
      "loss": 0.2389,
      "step": 1680
    },
    {
      "epoch": 130.0,
      "grad_norm": 0.8893792033195496,
      "learning_rate": 8.631799163179917e-05,
      "loss": 0.3059,
      "step": 1690
    },
    {
      "epoch": 130.8,
      "grad_norm": 0.5562736392021179,
      "learning_rate": 8.623430962343097e-05,
      "loss": 0.2586,
      "step": 1700
    },
    {
      "epoch": 131.56,
      "grad_norm": 0.41472992300987244,
      "learning_rate": 8.615062761506276e-05,
      "loss": 0.2632,
      "step": 1710
    },
    {
      "epoch": 132.32,
      "grad_norm": 0.23705990612506866,
      "learning_rate": 8.606694560669456e-05,
      "loss": 0.2678,
      "step": 1720
    },
    {
      "epoch": 133.08,
      "grad_norm": 0.6223066449165344,
      "learning_rate": 8.598326359832637e-05,
      "loss": 0.3278,
      "step": 1730
    },
    {
      "epoch": 133.88,
      "grad_norm": 0.7489154934883118,
      "learning_rate": 8.589958158995817e-05,
      "loss": 0.3094,
      "step": 1740
    },
    {
      "epoch": 134.64,
      "grad_norm": 0.0001321160380030051,
      "learning_rate": 8.581589958158997e-05,
      "loss": 0.2386,
      "step": 1750
    },
    {
      "epoch": 135.4,
      "grad_norm": 0.6472476720809937,
      "learning_rate": 8.573221757322177e-05,
      "loss": 0.319,
      "step": 1760
    },
    {
      "epoch": 136.16,
      "grad_norm": 0.6706916689872742,
      "learning_rate": 8.564853556485356e-05,
      "loss": 0.2565,
      "step": 1770
    },
    {
      "epoch": 136.96,
      "grad_norm": 0.6396327614784241,
      "learning_rate": 8.556485355648536e-05,
      "loss": 0.3257,
      "step": 1780
    },
    {
      "epoch": 137.72,
      "grad_norm": 0.945931613445282,
      "learning_rate": 8.548117154811716e-05,
      "loss": 0.2912,
      "step": 1790
    },
    {
      "epoch": 138.48,
      "grad_norm": 0.4821135699748993,
      "learning_rate": 8.539748953974896e-05,
      "loss": 0.239,
      "step": 1800
    },
    {
      "epoch": 139.24,
      "grad_norm": 0.5348692536354065,
      "learning_rate": 8.531380753138077e-05,
      "loss": 0.2632,
      "step": 1810
    },
    {
      "epoch": 140.0,
      "grad_norm": 0.3767673671245575,
      "learning_rate": 8.523012552301257e-05,
      "loss": 0.2752,
      "step": 1820
    },
    {
      "epoch": 140.8,
      "grad_norm": 0.5291851758956909,
      "learning_rate": 8.514644351464436e-05,
      "loss": 0.2997,
      "step": 1830
    },
    {
      "epoch": 141.56,
      "grad_norm": 0.6979625225067139,
      "learning_rate": 8.506276150627616e-05,
      "loss": 0.2731,
      "step": 1840
    },
    {
      "epoch": 142.32,
      "grad_norm": 0.2552458941936493,
      "learning_rate": 8.497907949790796e-05,
      "loss": 0.2046,
      "step": 1850
    },
    {
      "epoch": 143.08,
      "grad_norm": 0.7138965725898743,
      "learning_rate": 8.489539748953976e-05,
      "loss": 0.3448,
      "step": 1860
    },
    {
      "epoch": 143.88,
      "grad_norm": 0.602065920829773,
      "learning_rate": 8.481171548117155e-05,
      "loss": 0.3201,
      "step": 1870
    },
    {
      "epoch": 144.64,
      "grad_norm": 0.8118859529495239,
      "learning_rate": 8.472803347280335e-05,
      "loss": 0.2622,
      "step": 1880
    },
    {
      "epoch": 145.4,
      "grad_norm": 0.00017894129268825054,
      "learning_rate": 8.464435146443515e-05,
      "loss": 0.2388,
      "step": 1890
    },
    {
      "epoch": 146.16,
      "grad_norm": 0.5059804320335388,
      "learning_rate": 8.456066945606696e-05,
      "loss": 0.3429,
      "step": 1900
    },
    {
      "epoch": 146.96,
      "grad_norm": 0.47752124071121216,
      "learning_rate": 8.447698744769876e-05,
      "loss": 0.2575,
      "step": 1910
    },
    {
      "epoch": 147.72,
      "grad_norm": 0.6666246056556702,
      "learning_rate": 8.439330543933056e-05,
      "loss": 0.284,
      "step": 1920
    },
    {
      "epoch": 148.48,
      "grad_norm": 0.6572575569152832,
      "learning_rate": 8.430962343096235e-05,
      "loss": 0.2432,
      "step": 1930
    },
    {
      "epoch": 149.24,
      "grad_norm": 0.644818902015686,
      "learning_rate": 8.422594142259415e-05,
      "loss": 0.2844,
      "step": 1940
    },
    {
      "epoch": 150.0,
      "grad_norm": 1.5558511018753052,
      "learning_rate": 8.414225941422595e-05,
      "loss": 0.2891,
      "step": 1950
    },
    {
      "epoch": 150.8,
      "grad_norm": 0.9465558528900146,
      "learning_rate": 8.405857740585775e-05,
      "loss": 0.2765,
      "step": 1960
    },
    {
      "epoch": 151.56,
      "grad_norm": 0.8076108694076538,
      "learning_rate": 8.397489539748954e-05,
      "loss": 0.2718,
      "step": 1970
    },
    {
      "epoch": 152.32,
      "grad_norm": 0.6895241141319275,
      "learning_rate": 8.389121338912134e-05,
      "loss": 0.2662,
      "step": 1980
    },
    {
      "epoch": 153.08,
      "grad_norm": 0.42998993396759033,
      "learning_rate": 8.380753138075314e-05,
      "loss": 0.2715,
      "step": 1990
    },
    {
      "epoch": 153.88,
      "grad_norm": 0.5157560706138611,
      "learning_rate": 8.372384937238494e-05,
      "loss": 0.2677,
      "step": 2000
    },
    {
      "epoch": 154.64,
      "grad_norm": 0.5690245628356934,
      "learning_rate": 8.364016736401675e-05,
      "loss": 0.2882,
      "step": 2010
    },
    {
      "epoch": 155.4,
      "grad_norm": 0.5015438199043274,
      "learning_rate": 8.355648535564855e-05,
      "loss": 0.2652,
      "step": 2020
    },
    {
      "epoch": 156.16,
      "grad_norm": 0.3558717370033264,
      "learning_rate": 8.347280334728034e-05,
      "loss": 0.2938,
      "step": 2030
    },
    {
      "epoch": 156.96,
      "grad_norm": 0.8188201189041138,
      "learning_rate": 8.338912133891214e-05,
      "loss": 0.2977,
      "step": 2040
    },
    {
      "epoch": 157.72,
      "grad_norm": 0.8057423830032349,
      "learning_rate": 8.330543933054394e-05,
      "loss": 0.2342,
      "step": 2050
    },
    {
      "epoch": 158.48,
      "grad_norm": 0.4321524500846863,
      "learning_rate": 8.322175732217574e-05,
      "loss": 0.2955,
      "step": 2060
    },
    {
      "epoch": 159.24,
      "grad_norm": 0.5147210955619812,
      "learning_rate": 8.313807531380753e-05,
      "loss": 0.2526,
      "step": 2070
    },
    {
      "epoch": 160.0,
      "grad_norm": 0.771668016910553,
      "learning_rate": 8.305439330543933e-05,
      "loss": 0.3067,
      "step": 2080
    },
    {
      "epoch": 160.8,
      "grad_norm": 0.6141147017478943,
      "learning_rate": 8.297071129707113e-05,
      "loss": 0.2733,
      "step": 2090
    },
    {
      "epoch": 161.56,
      "grad_norm": 1.0377253293991089,
      "learning_rate": 8.288702928870293e-05,
      "loss": 0.2909,
      "step": 2100
    },
    {
      "epoch": 162.32,
      "grad_norm": 0.3286207318305969,
      "learning_rate": 8.280334728033472e-05,
      "loss": 0.2491,
      "step": 2110
    },
    {
      "epoch": 163.08,
      "grad_norm": 0.5966292023658752,
      "learning_rate": 8.271966527196652e-05,
      "loss": 0.2761,
      "step": 2120
    },
    {
      "epoch": 163.88,
      "grad_norm": 0.7708263993263245,
      "learning_rate": 8.263598326359832e-05,
      "loss": 0.2874,
      "step": 2130
    },
    {
      "epoch": 164.64,
      "grad_norm": 0.6249658465385437,
      "learning_rate": 8.255230125523013e-05,
      "loss": 0.2681,
      "step": 2140
    },
    {
      "epoch": 165.4,
      "grad_norm": 0.4472973048686981,
      "learning_rate": 8.246861924686193e-05,
      "loss": 0.2753,
      "step": 2150
    },
    {
      "epoch": 166.16,
      "grad_norm": 0.7961902022361755,
      "learning_rate": 8.238493723849373e-05,
      "loss": 0.2798,
      "step": 2160
    },
    {
      "epoch": 166.96,
      "grad_norm": 0.48364782333374023,
      "learning_rate": 8.230125523012552e-05,
      "loss": 0.2924,
      "step": 2170
    },
    {
      "epoch": 167.72,
      "grad_norm": 0.5379577279090881,
      "learning_rate": 8.221757322175732e-05,
      "loss": 0.2509,
      "step": 2180
    },
    {
      "epoch": 168.48,
      "grad_norm": 0.7065776586532593,
      "learning_rate": 8.213389121338912e-05,
      "loss": 0.2763,
      "step": 2190
    },
    {
      "epoch": 169.24,
      "grad_norm": 0.664776086807251,
      "learning_rate": 8.205020920502092e-05,
      "loss": 0.2726,
      "step": 2200
    },
    {
      "epoch": 170.0,
      "grad_norm": 0.47445446252822876,
      "learning_rate": 8.196652719665271e-05,
      "loss": 0.2696,
      "step": 2210
    },
    {
      "epoch": 170.8,
      "grad_norm": 0.5877684354782104,
      "learning_rate": 8.188284518828451e-05,
      "loss": 0.3092,
      "step": 2220
    },
    {
      "epoch": 171.56,
      "grad_norm": 0.8296095728874207,
      "learning_rate": 8.179916317991632e-05,
      "loss": 0.2458,
      "step": 2230
    },
    {
      "epoch": 172.32,
      "grad_norm": 0.2947175204753876,
      "learning_rate": 8.171548117154812e-05,
      "loss": 0.245,
      "step": 2240
    },
    {
      "epoch": 173.08,
      "grad_norm": 0.6972278952598572,
      "learning_rate": 8.163179916317992e-05,
      "loss": 0.3018,
      "step": 2250
    },
    {
      "epoch": 173.88,
      "grad_norm": 0.36442530155181885,
      "learning_rate": 8.154811715481172e-05,
      "loss": 0.2875,
      "step": 2260
    },
    {
      "epoch": 174.64,
      "grad_norm": 0.38456207513809204,
      "learning_rate": 8.146443514644351e-05,
      "loss": 0.2328,
      "step": 2270
    },
    {
      "epoch": 175.4,
      "grad_norm": 0.573784589767456,
      "learning_rate": 8.138075313807531e-05,
      "loss": 0.2851,
      "step": 2280
    },
    {
      "epoch": 176.16,
      "grad_norm": 0.5698201656341553,
      "learning_rate": 8.129707112970711e-05,
      "loss": 0.2807,
      "step": 2290
    },
    {
      "epoch": 176.96,
      "grad_norm": 0.7705904841423035,
      "learning_rate": 8.121338912133891e-05,
      "loss": 0.2982,
      "step": 2300
    },
    {
      "epoch": 177.72,
      "grad_norm": 0.5071648359298706,
      "learning_rate": 8.11297071129707e-05,
      "loss": 0.2127,
      "step": 2310
    },
    {
      "epoch": 178.48,
      "grad_norm": 1.354844331741333,
      "learning_rate": 8.104602510460252e-05,
      "loss": 0.2938,
      "step": 2320
    },
    {
      "epoch": 179.24,
      "grad_norm": 0.4220433533191681,
      "learning_rate": 8.096234309623431e-05,
      "loss": 0.241,
      "step": 2330
    },
    {
      "epoch": 180.0,
      "grad_norm": 0.4945012331008911,
      "learning_rate": 8.087866108786611e-05,
      "loss": 0.2482,
      "step": 2340
    },
    {
      "epoch": 180.8,
      "grad_norm": 0.6846901774406433,
      "learning_rate": 8.079497907949791e-05,
      "loss": 0.2146,
      "step": 2350
    },
    {
      "epoch": 181.56,
      "grad_norm": 0.6438813209533691,
      "learning_rate": 8.07112970711297e-05,
      "loss": 0.252,
      "step": 2360
    },
    {
      "epoch": 182.32,
      "grad_norm": 0.486453652381897,
      "learning_rate": 8.06276150627615e-05,
      "loss": 0.2417,
      "step": 2370
    },
    {
      "epoch": 183.08,
      "grad_norm": 0.5143964290618896,
      "learning_rate": 8.05439330543933e-05,
      "loss": 0.2882,
      "step": 2380
    },
    {
      "epoch": 183.88,
      "grad_norm": 1.5306233167648315,
      "learning_rate": 8.04602510460251e-05,
      "loss": 0.2624,
      "step": 2390
    },
    {
      "epoch": 184.64,
      "grad_norm": 0.47094181180000305,
      "learning_rate": 8.037656903765691e-05,
      "loss": 0.2009,
      "step": 2400
    },
    {
      "epoch": 185.4,
      "grad_norm": 0.510780930519104,
      "learning_rate": 8.029288702928871e-05,
      "loss": 0.3383,
      "step": 2410
    },
    {
      "epoch": 186.16,
      "grad_norm": 1.601507306098938,
      "learning_rate": 8.02092050209205e-05,
      "loss": 0.2143,
      "step": 2420
    },
    {
      "epoch": 186.96,
      "grad_norm": 0.5631161332130432,
      "learning_rate": 8.01255230125523e-05,
      "loss": 0.2093,
      "step": 2430
    },
    {
      "epoch": 187.72,
      "grad_norm": 0.2900833189487457,
      "learning_rate": 8.00418410041841e-05,
      "loss": 0.1871,
      "step": 2440
    },
    {
      "epoch": 188.48,
      "grad_norm": 0.6894598007202148,
      "learning_rate": 7.99581589958159e-05,
      "loss": 0.2654,
      "step": 2450
    },
    {
      "epoch": 189.24,
      "grad_norm": 8.970075607299805,
      "learning_rate": 7.98744769874477e-05,
      "loss": 0.178,
      "step": 2460
    },
    {
      "epoch": 190.0,
      "grad_norm": 0.2863423228263855,
      "learning_rate": 7.97907949790795e-05,
      "loss": 0.1958,
      "step": 2470
    },
    {
      "epoch": 190.8,
      "grad_norm": 0.3206811547279358,
      "learning_rate": 7.97071129707113e-05,
      "loss": 0.2548,
      "step": 2480
    },
    {
      "epoch": 191.56,
      "grad_norm": 0.6876190900802612,
      "learning_rate": 7.96234309623431e-05,
      "loss": 0.1843,
      "step": 2490
    },
    {
      "epoch": 192.32,
      "grad_norm": 1.09211266040802,
      "learning_rate": 7.95397489539749e-05,
      "loss": 0.1721,
      "step": 2500
    },
    {
      "epoch": 193.08,
      "grad_norm": 14.786331176757812,
      "learning_rate": 7.94560669456067e-05,
      "loss": 0.327,
      "step": 2510
    },
    {
      "epoch": 193.88,
      "grad_norm": 43.85841369628906,
      "learning_rate": 7.93723849372385e-05,
      "loss": 0.3656,
      "step": 2520
    },
    {
      "epoch": 194.64,
      "grad_norm": 0.32373273372650146,
      "learning_rate": 7.92887029288703e-05,
      "loss": 0.2476,
      "step": 2530
    },
    {
      "epoch": 195.4,
      "grad_norm": 0.00038791695260442793,
      "learning_rate": 7.920502092050209e-05,
      "loss": 0.1761,
      "step": 2540
    },
    {
      "epoch": 196.16,
      "grad_norm": 1.776816487312317,
      "learning_rate": 7.912133891213389e-05,
      "loss": 0.1828,
      "step": 2550
    },
    {
      "epoch": 196.96,
      "grad_norm": 0.5436874628067017,
      "learning_rate": 7.903765690376569e-05,
      "loss": 0.2261,
      "step": 2560
    },
    {
      "epoch": 197.72,
      "grad_norm": 0.45118626952171326,
      "learning_rate": 7.89539748953975e-05,
      "loss": 0.1712,
      "step": 2570
    },
    {
      "epoch": 198.48,
      "grad_norm": 3.249994993209839,
      "learning_rate": 7.88702928870293e-05,
      "loss": 0.247,
      "step": 2580
    },
    {
      "epoch": 199.24,
      "grad_norm": 1.1368451118469238,
      "learning_rate": 7.878661087866109e-05,
      "loss": 0.2208,
      "step": 2590
    },
    {
      "epoch": 200.0,
      "grad_norm": 0.6388035416603088,
      "learning_rate": 7.870292887029289e-05,
      "loss": 0.1518,
      "step": 2600
    },
    {
      "epoch": 200.8,
      "grad_norm": 3.905496597290039,
      "learning_rate": 7.861924686192469e-05,
      "loss": 0.1279,
      "step": 2610
    },
    {
      "epoch": 201.56,
      "grad_norm": 27.926372528076172,
      "learning_rate": 7.853556485355649e-05,
      "loss": 0.4156,
      "step": 2620
    },
    {
      "epoch": 202.32,
      "grad_norm": 0.19806312024593353,
      "learning_rate": 7.845188284518828e-05,
      "loss": 0.0903,
      "step": 2630
    },
    {
      "epoch": 203.08,
      "grad_norm": 0.0014255548594519496,
      "learning_rate": 7.836820083682008e-05,
      "loss": 0.1962,
      "step": 2640
    },
    {
      "epoch": 203.88,
      "grad_norm": 2.5122647285461426,
      "learning_rate": 7.828451882845189e-05,
      "loss": 0.1382,
      "step": 2650
    },
    {
      "epoch": 204.64,
      "grad_norm": 5.140716075897217,
      "learning_rate": 7.820083682008369e-05,
      "loss": 0.2123,
      "step": 2660
    },
    {
      "epoch": 205.4,
      "grad_norm": 1.3244779109954834,
      "learning_rate": 7.811715481171549e-05,
      "loss": 0.1756,
      "step": 2670
    },
    {
      "epoch": 206.16,
      "grad_norm": 26.31413459777832,
      "learning_rate": 7.803347280334728e-05,
      "loss": 0.1425,
      "step": 2680
    },
    {
      "epoch": 206.96,
      "grad_norm": 1.5801438093185425,
      "learning_rate": 7.794979079497908e-05,
      "loss": 0.1822,
      "step": 2690
    },
    {
      "epoch": 207.72,
      "grad_norm": 1.6209617853164673,
      "learning_rate": 7.786610878661088e-05,
      "loss": 0.1909,
      "step": 2700
    },
    {
      "epoch": 208.48,
      "grad_norm": 0.6468285918235779,
      "learning_rate": 7.778242677824268e-05,
      "loss": 0.1741,
      "step": 2710
    },
    {
      "epoch": 209.24,
      "grad_norm": 0.04982404410839081,
      "learning_rate": 7.769874476987448e-05,
      "loss": 0.1027,
      "step": 2720
    },
    {
      "epoch": 210.0,
      "grad_norm": 0.0010119529906660318,
      "learning_rate": 7.761506276150629e-05,
      "loss": 0.1457,
      "step": 2730
    },
    {
      "epoch": 210.8,
      "grad_norm": 38.17147445678711,
      "learning_rate": 7.753138075313808e-05,
      "loss": 0.164,
      "step": 2740
    },
    {
      "epoch": 211.56,
      "grad_norm": 1.5547058582305908,
      "learning_rate": 7.744769874476988e-05,
      "loss": 0.1102,
      "step": 2750
    },
    {
      "epoch": 212.32,
      "grad_norm": 0.011813919991254807,
      "learning_rate": 7.736401673640168e-05,
      "loss": 0.1455,
      "step": 2760
    },
    {
      "epoch": 213.08,
      "grad_norm": 0.007001452147960663,
      "learning_rate": 7.728033472803348e-05,
      "loss": 0.1694,
      "step": 2770
    },
    {
      "epoch": 213.88,
      "grad_norm": 0.7564431428909302,
      "learning_rate": 7.719665271966527e-05,
      "loss": 0.2061,
      "step": 2780
    },
    {
      "epoch": 214.64,
      "grad_norm": 1.1208900213241577,
      "learning_rate": 7.711297071129707e-05,
      "loss": 0.2951,
      "step": 2790
    },
    {
      "epoch": 215.4,
      "grad_norm": 0.4398239552974701,
      "learning_rate": 7.702928870292887e-05,
      "loss": 0.1161,
      "step": 2800
    },
    {
      "epoch": 216.16,
      "grad_norm": 0.34080252051353455,
      "learning_rate": 7.694560669456067e-05,
      "loss": 0.1177,
      "step": 2810
    },
    {
      "epoch": 216.96,
      "grad_norm": 0.27304738759994507,
      "learning_rate": 7.686192468619248e-05,
      "loss": 0.117,
      "step": 2820
    },
    {
      "epoch": 217.72,
      "grad_norm": 0.00020999423577450216,
      "learning_rate": 7.677824267782428e-05,
      "loss": 0.1518,
      "step": 2830
    },
    {
      "epoch": 218.48,
      "grad_norm": 0.3551616072654724,
      "learning_rate": 7.669456066945607e-05,
      "loss": 0.0911,
      "step": 2840
    },
    {
      "epoch": 219.24,
      "grad_norm": 18.045923233032227,
      "learning_rate": 7.661087866108787e-05,
      "loss": 0.1732,
      "step": 2850
    },
    {
      "epoch": 220.0,
      "grad_norm": 0.0011837411439046264,
      "learning_rate": 7.652719665271967e-05,
      "loss": 0.3062,
      "step": 2860
    },
    {
      "epoch": 220.8,
      "grad_norm": 7.931725025177002,
      "learning_rate": 7.644351464435147e-05,
      "loss": 0.1608,
      "step": 2870
    },
    {
      "epoch": 221.56,
      "grad_norm": 0.7454537749290466,
      "learning_rate": 7.635983263598326e-05,
      "loss": 0.163,
      "step": 2880
    },
    {
      "epoch": 222.32,
      "grad_norm": 6.1156840324401855,
      "learning_rate": 7.627615062761506e-05,
      "loss": 0.0693,
      "step": 2890
    },
    {
      "epoch": 223.08,
      "grad_norm": 1.552299976348877,
      "learning_rate": 7.619246861924687e-05,
      "loss": 0.1688,
      "step": 2900
    },
    {
      "epoch": 223.88,
      "grad_norm": 11.509157180786133,
      "learning_rate": 7.610878661087867e-05,
      "loss": 0.1652,
      "step": 2910
    },
    {
      "epoch": 224.64,
      "grad_norm": 0.007240507751703262,
      "learning_rate": 7.602510460251047e-05,
      "loss": 0.1246,
      "step": 2920
    },
    {
      "epoch": 225.4,
      "grad_norm": 0.2530362010002136,
      "learning_rate": 7.594142259414227e-05,
      "loss": 0.0838,
      "step": 2930
    },
    {
      "epoch": 226.16,
      "grad_norm": 0.5732694864273071,
      "learning_rate": 7.585774058577406e-05,
      "loss": 0.1891,
      "step": 2940
    },
    {
      "epoch": 226.96,
      "grad_norm": 3.3265058994293213,
      "learning_rate": 7.577405857740586e-05,
      "loss": 0.219,
      "step": 2950
    },
    {
      "epoch": 227.72,
      "grad_norm": 19.179094314575195,
      "learning_rate": 7.569037656903766e-05,
      "loss": 0.1078,
      "step": 2960
    },
    {
      "epoch": 228.48,
      "grad_norm": 0.5755670666694641,
      "learning_rate": 7.560669456066946e-05,
      "loss": 0.1628,
      "step": 2970
    },
    {
      "epoch": 229.24,
      "grad_norm": 13.489233016967773,
      "learning_rate": 7.552301255230127e-05,
      "loss": 0.1031,
      "step": 2980
    },
    {
      "epoch": 230.0,
      "grad_norm": 0.42450758814811707,
      "learning_rate": 7.543933054393307e-05,
      "loss": 0.1062,
      "step": 2990
    },
    {
      "epoch": 230.8,
      "grad_norm": 74.2090835571289,
      "learning_rate": 7.535564853556486e-05,
      "loss": 0.2763,
      "step": 3000
    },
    {
      "epoch": 231.56,
      "grad_norm": 0.0013025101507082582,
      "learning_rate": 7.527196652719666e-05,
      "loss": 0.0475,
      "step": 3010
    },
    {
      "epoch": 232.32,
      "grad_norm": 0.36847984790802,
      "learning_rate": 7.518828451882846e-05,
      "loss": 0.1262,
      "step": 3020
    },
    {
      "epoch": 233.08,
      "grad_norm": 2.1009180545806885,
      "learning_rate": 7.510460251046026e-05,
      "loss": 0.2701,
      "step": 3030
    },
    {
      "epoch": 233.88,
      "grad_norm": 3.9676990509033203,
      "learning_rate": 7.502092050209205e-05,
      "loss": 0.0567,
      "step": 3040
    },
    {
      "epoch": 234.64,
      "grad_norm": 2.426058769226074,
      "learning_rate": 7.493723849372385e-05,
      "loss": 0.1485,
      "step": 3050
    },
    {
      "epoch": 235.4,
      "grad_norm": 0.45143410563468933,
      "learning_rate": 7.485355648535565e-05,
      "loss": 0.0808,
      "step": 3060
    },
    {
      "epoch": 236.16,
      "grad_norm": 0.001772599876858294,
      "learning_rate": 7.476987447698746e-05,
      "loss": 0.159,
      "step": 3070
    },
    {
      "epoch": 236.96,
      "grad_norm": 0.6228379011154175,
      "learning_rate": 7.468619246861926e-05,
      "loss": 0.3161,
      "step": 3080
    },
    {
      "epoch": 237.72,
      "grad_norm": 1.1277211904525757,
      "learning_rate": 7.460251046025106e-05,
      "loss": 0.0895,
      "step": 3090
    },
    {
      "epoch": 238.48,
      "grad_norm": 0.5020686388015747,
      "learning_rate": 7.451882845188285e-05,
      "loss": 0.066,
      "step": 3100
    },
    {
      "epoch": 239.24,
      "grad_norm": 0.4364977777004242,
      "learning_rate": 7.443514644351465e-05,
      "loss": 0.0973,
      "step": 3110
    },
    {
      "epoch": 240.0,
      "grad_norm": 0.331858366727829,
      "learning_rate": 7.435146443514645e-05,
      "loss": 0.1385,
      "step": 3120
    },
    {
      "epoch": 240.8,
      "grad_norm": 3.562025308609009,
      "learning_rate": 7.426778242677825e-05,
      "loss": 0.1554,
      "step": 3130
    },
    {
      "epoch": 241.56,
      "grad_norm": 0.4786972403526306,
      "learning_rate": 7.418410041841004e-05,
      "loss": 0.1127,
      "step": 3140
    },
    {
      "epoch": 242.32,
      "grad_norm": 0.22106488049030304,
      "learning_rate": 7.410041841004186e-05,
      "loss": 0.1212,
      "step": 3150
    },
    {
      "epoch": 243.08,
      "grad_norm": 0.3226916193962097,
      "learning_rate": 7.401673640167365e-05,
      "loss": 0.0622,
      "step": 3160
    },
    {
      "epoch": 243.88,
      "grad_norm": 0.18059372901916504,
      "learning_rate": 7.393305439330545e-05,
      "loss": 0.0578,
      "step": 3170
    },
    {
      "epoch": 244.64,
      "grad_norm": 0.18617404997348785,
      "learning_rate": 7.384937238493725e-05,
      "loss": 0.1017,
      "step": 3180
    },
    {
      "epoch": 245.4,
      "grad_norm": 0.4568879008293152,
      "learning_rate": 7.376569037656905e-05,
      "loss": 0.0456,
      "step": 3190
    },
    {
      "epoch": 246.16,
      "grad_norm": 0.00673318887129426,
      "learning_rate": 7.368200836820084e-05,
      "loss": 0.0578,
      "step": 3200
    },
    {
      "epoch": 246.96,
      "grad_norm": 0.0004931857693009079,
      "learning_rate": 7.359832635983264e-05,
      "loss": 0.0363,
      "step": 3210
    },
    {
      "epoch": 247.72,
      "grad_norm": 0.18300887942314148,
      "learning_rate": 7.351464435146444e-05,
      "loss": 0.0854,
      "step": 3220
    },
    {
      "epoch": 248.48,
      "grad_norm": 0.003890759777277708,
      "learning_rate": 7.343096234309624e-05,
      "loss": 0.1226,
      "step": 3230
    },
    {
      "epoch": 249.24,
      "grad_norm": 1.134564757347107,
      "learning_rate": 7.334728033472805e-05,
      "loss": 0.0956,
      "step": 3240
    },
    {
      "epoch": 250.0,
      "grad_norm": 0.00022713415091857314,
      "learning_rate": 7.326359832635985e-05,
      "loss": 0.1,
      "step": 3250
    },
    {
      "epoch": 250.8,
      "grad_norm": 7.879546165466309,
      "learning_rate": 7.317991631799164e-05,
      "loss": 0.0877,
      "step": 3260
    },
    {
      "epoch": 251.56,
      "grad_norm": 2.57737135887146,
      "learning_rate": 7.309623430962344e-05,
      "loss": 0.09,
      "step": 3270
    },
    {
      "epoch": 252.32,
      "grad_norm": 0.4551700949668884,
      "learning_rate": 7.301255230125524e-05,
      "loss": 0.1288,
      "step": 3280
    }
  ],
  "logging_steps": 10,
  "max_steps": 12000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1000,
  "save_steps": 10,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8125347399735104e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}