|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9995577178239717, |
|
"eval_steps": 142, |
|
"global_step": 565, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 4.582726955413818, |
|
"learning_rate": 3e-06, |
|
"loss": 3.3182, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 3.3362529277801514, |
|
"eval_runtime": 14.4066, |
|
"eval_samples_per_second": 33.11, |
|
"eval_steps_per_second": 8.329, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 4.511408805847168, |
|
"learning_rate": 6e-06, |
|
"loss": 3.2788, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.698073387145996, |
|
"learning_rate": 9e-06, |
|
"loss": 3.3753, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.63289213180542, |
|
"learning_rate": 1.2e-05, |
|
"loss": 3.3188, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.522731781005859, |
|
"learning_rate": 1.5e-05, |
|
"loss": 3.3082, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.691629409790039, |
|
"learning_rate": 1.8e-05, |
|
"loss": 3.2839, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.7225470542907715, |
|
"learning_rate": 2.1e-05, |
|
"loss": 3.2686, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.691238880157471, |
|
"learning_rate": 2.4e-05, |
|
"loss": 3.0782, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.64508581161499, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 2.9714, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.355061054229736, |
|
"learning_rate": 3e-05, |
|
"loss": 2.6886, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.206029891967773, |
|
"learning_rate": 2.9999973928796923e-05, |
|
"loss": 2.4829, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.8142871856689453, |
|
"learning_rate": 2.999989571527831e-05, |
|
"loss": 2.0556, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.5461277961730957, |
|
"learning_rate": 2.9999765359716046e-05, |
|
"loss": 1.6669, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.3414511680603027, |
|
"learning_rate": 2.999958286256327e-05, |
|
"loss": 1.3396, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.0794336795806885, |
|
"learning_rate": 2.9999348224454367e-05, |
|
"loss": 1.0494, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.524055242538452, |
|
"learning_rate": 2.9999061446204985e-05, |
|
"loss": 0.7706, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.8878108263015747, |
|
"learning_rate": 2.9998722528812e-05, |
|
"loss": 0.5666, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.5863118171691895, |
|
"learning_rate": 2.9998331473453557e-05, |
|
"loss": 0.4017, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.3763269186019897, |
|
"learning_rate": 2.9997888281489015e-05, |
|
"loss": 0.3645, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.0631593465805054, |
|
"learning_rate": 2.9997392954458985e-05, |
|
"loss": 0.2886, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.9699187278747559, |
|
"learning_rate": 2.9996845494085306e-05, |
|
"loss": 0.1911, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.8020363450050354, |
|
"learning_rate": 2.999624590227103e-05, |
|
"loss": 0.1762, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.8209808468818665, |
|
"learning_rate": 2.9995594181100443e-05, |
|
"loss": 0.1449, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.39430710673332214, |
|
"learning_rate": 2.9994890332839027e-05, |
|
"loss": 0.1201, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.9686388969421387, |
|
"learning_rate": 2.9994134359933476e-05, |
|
"loss": 0.1421, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3096848726272583, |
|
"learning_rate": 2.999332626501167e-05, |
|
"loss": 0.1178, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.7969554662704468, |
|
"learning_rate": 2.9992466050882673e-05, |
|
"loss": 0.1127, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.6500488519668579, |
|
"learning_rate": 2.9991553720536733e-05, |
|
"loss": 0.0909, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.3015730381011963, |
|
"learning_rate": 2.9990589277145254e-05, |
|
"loss": 0.2658, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.9849212169647217, |
|
"learning_rate": 2.9989572724060797e-05, |
|
"loss": 0.0956, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.366385579109192, |
|
"learning_rate": 2.998850406481707e-05, |
|
"loss": 0.1276, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.42471978068351746, |
|
"learning_rate": 2.9987383303128887e-05, |
|
"loss": 0.0885, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2538015842437744, |
|
"learning_rate": 2.9986210442892215e-05, |
|
"loss": 0.1246, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.9688710570335388, |
|
"learning_rate": 2.9984985488184086e-05, |
|
"loss": 0.147, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.4737780690193176, |
|
"learning_rate": 2.9983708443262656e-05, |
|
"loss": 0.1141, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.33743003010749817, |
|
"learning_rate": 2.9982379312567126e-05, |
|
"loss": 0.119, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.49797967076301575, |
|
"learning_rate": 2.998099810071777e-05, |
|
"loss": 0.1319, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.8518417477607727, |
|
"learning_rate": 2.9979564812515906e-05, |
|
"loss": 0.1609, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2824989855289459, |
|
"learning_rate": 2.9978079452943875e-05, |
|
"loss": 0.1126, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2989768981933594, |
|
"learning_rate": 2.9976542027165016e-05, |
|
"loss": 0.1164, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.49210599064826965, |
|
"learning_rate": 2.9974952540523676e-05, |
|
"loss": 0.1175, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.35604602098464966, |
|
"learning_rate": 2.997331099854516e-05, |
|
"loss": 0.0876, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.6768718957901001, |
|
"learning_rate": 2.9971617406935735e-05, |
|
"loss": 0.1649, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.4443073868751526, |
|
"learning_rate": 2.9969871771582596e-05, |
|
"loss": 0.0655, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.8104651570320129, |
|
"learning_rate": 2.996807409855385e-05, |
|
"loss": 0.1336, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.4232860803604126, |
|
"learning_rate": 2.99662243940985e-05, |
|
"loss": 0.0993, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.5418788194656372, |
|
"learning_rate": 2.9964322664646412e-05, |
|
"loss": 0.1131, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.39015039801597595, |
|
"learning_rate": 2.9962368916808308e-05, |
|
"loss": 0.13, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.40271061658859253, |
|
"learning_rate": 2.9960363157375724e-05, |
|
"loss": 0.1088, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.23656688630580902, |
|
"learning_rate": 2.9958305393321e-05, |
|
"loss": 0.1209, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.4160294234752655, |
|
"learning_rate": 2.995619563179726e-05, |
|
"loss": 0.1214, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.25202676653862, |
|
"learning_rate": 2.9954033880138368e-05, |
|
"loss": 0.115, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.5302817821502686, |
|
"learning_rate": 2.995182014585892e-05, |
|
"loss": 0.124, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.063620924949646, |
|
"learning_rate": 2.9949554436654215e-05, |
|
"loss": 0.1347, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3588281273841858, |
|
"learning_rate": 2.994723676040022e-05, |
|
"loss": 0.0931, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.6446585059165955, |
|
"learning_rate": 2.9944867125153548e-05, |
|
"loss": 0.1108, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.31614363193511963, |
|
"learning_rate": 2.9942445539151432e-05, |
|
"loss": 0.0712, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2502954304218292, |
|
"learning_rate": 2.9939972010811693e-05, |
|
"loss": 0.0914, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.33516696095466614, |
|
"learning_rate": 2.993744654873272e-05, |
|
"loss": 0.0923, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2990712821483612, |
|
"learning_rate": 2.993486916169341e-05, |
|
"loss": 0.0899, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.40888020396232605, |
|
"learning_rate": 2.9932239858653183e-05, |
|
"loss": 0.0789, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.756536602973938, |
|
"learning_rate": 2.992955864875192e-05, |
|
"loss": 0.1707, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3337976932525635, |
|
"learning_rate": 2.9926825541309928e-05, |
|
"loss": 0.0676, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.6163253784179688, |
|
"learning_rate": 2.9924040545827936e-05, |
|
"loss": 0.0961, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.45453113317489624, |
|
"learning_rate": 2.9921203671987025e-05, |
|
"loss": 0.0724, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.64926677942276, |
|
"learning_rate": 2.9918314929648637e-05, |
|
"loss": 0.1015, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.37845009565353394, |
|
"learning_rate": 2.991537432885449e-05, |
|
"loss": 0.0964, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3352985382080078, |
|
"learning_rate": 2.991238187982659e-05, |
|
"loss": 0.0862, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.485416442155838, |
|
"learning_rate": 2.9909337592967176e-05, |
|
"loss": 0.1095, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.9582281708717346, |
|
"learning_rate": 2.9906241478858666e-05, |
|
"loss": 0.1314, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.3674233555793762, |
|
"learning_rate": 2.990309354826366e-05, |
|
"loss": 0.109, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.408796101808548, |
|
"learning_rate": 2.9899893812124862e-05, |
|
"loss": 0.126, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.5258140563964844, |
|
"learning_rate": 2.989664228156507e-05, |
|
"loss": 0.0596, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.3256467580795288, |
|
"learning_rate": 2.9893338967887128e-05, |
|
"loss": 0.0736, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.22679170966148376, |
|
"learning_rate": 2.988998388257388e-05, |
|
"loss": 0.0982, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2959941625595093, |
|
"learning_rate": 2.988657703728815e-05, |
|
"loss": 0.0515, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.26353922486305237, |
|
"learning_rate": 2.9883118443872662e-05, |
|
"loss": 0.0732, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.23671653866767883, |
|
"learning_rate": 2.9879608114350064e-05, |
|
"loss": 0.0865, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.415596067905426, |
|
"learning_rate": 2.9876046060922803e-05, |
|
"loss": 0.0871, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.5430477261543274, |
|
"learning_rate": 2.987243229597316e-05, |
|
"loss": 0.1385, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.6044889092445374, |
|
"learning_rate": 2.9868766832063156e-05, |
|
"loss": 0.1279, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.452231764793396, |
|
"learning_rate": 2.986504968193454e-05, |
|
"loss": 0.0767, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.3864612579345703, |
|
"learning_rate": 2.9861280858508712e-05, |
|
"loss": 0.0728, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.3158702850341797, |
|
"learning_rate": 2.9857460374886717e-05, |
|
"loss": 0.087, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.448408842086792, |
|
"learning_rate": 2.985358824434916e-05, |
|
"loss": 0.078, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.4518391788005829, |
|
"learning_rate": 2.984966448035619e-05, |
|
"loss": 0.1013, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.9920629858970642, |
|
"learning_rate": 2.9845689096547442e-05, |
|
"loss": 0.1145, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5967357754707336, |
|
"learning_rate": 2.9841662106741986e-05, |
|
"loss": 0.1132, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5016050338745117, |
|
"learning_rate": 2.983758352493829e-05, |
|
"loss": 0.094, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.300605833530426, |
|
"learning_rate": 2.983345336531415e-05, |
|
"loss": 0.0762, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.44358232617378235, |
|
"learning_rate": 2.9829271642226665e-05, |
|
"loss": 0.0969, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.9166831374168396, |
|
"learning_rate": 2.9825038370212183e-05, |
|
"loss": 0.1532, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6240636706352234, |
|
"learning_rate": 2.982075356398623e-05, |
|
"loss": 0.1351, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.34054040908813477, |
|
"learning_rate": 2.9816417238443482e-05, |
|
"loss": 0.0803, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.3595755100250244, |
|
"learning_rate": 2.9812029408657698e-05, |
|
"loss": 0.0522, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.4143121540546417, |
|
"learning_rate": 2.9807590089881687e-05, |
|
"loss": 0.0816, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2190728336572647, |
|
"learning_rate": 2.980309929754722e-05, |
|
"loss": 0.0606, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.23459163308143616, |
|
"learning_rate": 2.9798557047265023e-05, |
|
"loss": 0.0841, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.24551355838775635, |
|
"learning_rate": 2.979396335482469e-05, |
|
"loss": 0.0545, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.471500426530838, |
|
"learning_rate": 2.9789318236194618e-05, |
|
"loss": 0.0948, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.5168260335922241, |
|
"learning_rate": 2.9784621707521993e-05, |
|
"loss": 0.1471, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.7932892441749573, |
|
"learning_rate": 2.97798737851327e-05, |
|
"loss": 0.1101, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.32231828570365906, |
|
"learning_rate": 2.977507448553128e-05, |
|
"loss": 0.0525, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.30824771523475647, |
|
"learning_rate": 2.9770223825400872e-05, |
|
"loss": 0.0965, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5549985766410828, |
|
"learning_rate": 2.9765321821603143e-05, |
|
"loss": 0.1105, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.6406698822975159, |
|
"learning_rate": 2.9760368491178244e-05, |
|
"loss": 0.1111, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.3495526909828186, |
|
"learning_rate": 2.9755363851344753e-05, |
|
"loss": 0.0792, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.3962193429470062, |
|
"learning_rate": 2.9750307919499595e-05, |
|
"loss": 0.0894, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5372034907341003, |
|
"learning_rate": 2.9745200713218002e-05, |
|
"loss": 0.1735, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.22693391144275665, |
|
"learning_rate": 2.9740042250253443e-05, |
|
"loss": 0.0603, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.4251677393913269, |
|
"learning_rate": 2.973483254853756e-05, |
|
"loss": 0.1124, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.22285960614681244, |
|
"learning_rate": 2.9729571626180116e-05, |
|
"loss": 0.0677, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.18614518642425537, |
|
"learning_rate": 2.972425950146891e-05, |
|
"loss": 0.0854, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.3953099250793457, |
|
"learning_rate": 2.9718896192869758e-05, |
|
"loss": 0.0832, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2483922243118286, |
|
"learning_rate": 2.9713481719026368e-05, |
|
"loss": 0.0992, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3185414969921112, |
|
"learning_rate": 2.970801609876032e-05, |
|
"loss": 0.0707, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.40958282351493835, |
|
"learning_rate": 2.9702499351070992e-05, |
|
"loss": 0.1019, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.5805838704109192, |
|
"learning_rate": 2.969693149513548e-05, |
|
"loss": 0.116, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.21251153945922852, |
|
"learning_rate": 2.969131255030855e-05, |
|
"loss": 0.0843, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.4262985587120056, |
|
"learning_rate": 2.9685642536122545e-05, |
|
"loss": 0.0706, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.4474552869796753, |
|
"learning_rate": 2.9679921472287358e-05, |
|
"loss": 0.0859, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3137888014316559, |
|
"learning_rate": 2.967414937869031e-05, |
|
"loss": 0.1316, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.2894512712955475, |
|
"learning_rate": 2.9668326275396133e-05, |
|
"loss": 0.0715, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.2738913595676422, |
|
"learning_rate": 2.966245218264687e-05, |
|
"loss": 0.0524, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.6975142955780029, |
|
"learning_rate": 2.96565271208618e-05, |
|
"loss": 0.1225, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5777083039283752, |
|
"learning_rate": 2.9650551110637397e-05, |
|
"loss": 0.1309, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.8783730864524841, |
|
"learning_rate": 2.964452417274723e-05, |
|
"loss": 0.0995, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.28593194484710693, |
|
"learning_rate": 2.96384463281419e-05, |
|
"loss": 0.0771, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.35265710949897766, |
|
"learning_rate": 2.9632317597948968e-05, |
|
"loss": 0.0696, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.4801347851753235, |
|
"learning_rate": 2.9626138003472884e-05, |
|
"loss": 0.0954, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.7368841171264648, |
|
"learning_rate": 2.9619907566194915e-05, |
|
"loss": 0.1493, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3211562931537628, |
|
"learning_rate": 2.9613626307773055e-05, |
|
"loss": 0.0721, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2380824089050293, |
|
"learning_rate": 2.9607294250041965e-05, |
|
"loss": 0.0574, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.4969393312931061, |
|
"learning_rate": 2.96009114150129e-05, |
|
"loss": 0.0463, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.4968222379684448, |
|
"learning_rate": 2.959447782487361e-05, |
|
"loss": 0.1302, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.5577062368392944, |
|
"learning_rate": 2.9587993501988292e-05, |
|
"loss": 0.1239, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.5446056127548218, |
|
"learning_rate": 2.958145846889749e-05, |
|
"loss": 0.1169, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.3976798355579376, |
|
"learning_rate": 2.957487274831803e-05, |
|
"loss": 0.1012, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6089766621589661, |
|
"learning_rate": 2.9568236363142927e-05, |
|
"loss": 0.0859, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.3312886655330658, |
|
"learning_rate": 2.9561549336441333e-05, |
|
"loss": 0.0996, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.3692180812358856, |
|
"learning_rate": 2.955481169145841e-05, |
|
"loss": 0.0958, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.23727117478847504, |
|
"learning_rate": 2.95480234516153e-05, |
|
"loss": 0.0815, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_loss": 0.09037704020738602, |
|
"eval_runtime": 14.7372, |
|
"eval_samples_per_second": 32.367, |
|
"eval_steps_per_second": 8.143, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.44341254234313965, |
|
"learning_rate": 2.9541184640509015e-05, |
|
"loss": 0.108, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.27407336235046387, |
|
"learning_rate": 2.953429528191236e-05, |
|
"loss": 0.1192, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.39919063448905945, |
|
"learning_rate": 2.9527355399773847e-05, |
|
"loss": 0.0832, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.3581640422344208, |
|
"learning_rate": 2.9520365018217622e-05, |
|
"loss": 0.0625, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.29779088497161865, |
|
"learning_rate": 2.951332416154337e-05, |
|
"loss": 0.111, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.21348236501216888, |
|
"learning_rate": 2.9506232854226242e-05, |
|
"loss": 0.092, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.3025546073913574, |
|
"learning_rate": 2.9499091120916757e-05, |
|
"loss": 0.1217, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.24288436770439148, |
|
"learning_rate": 2.9491898986440726e-05, |
|
"loss": 0.0752, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.22768545150756836, |
|
"learning_rate": 2.9484656475799164e-05, |
|
"loss": 0.0964, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.7110497355461121, |
|
"learning_rate": 2.9477363614168197e-05, |
|
"loss": 0.1099, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.7041640281677246, |
|
"learning_rate": 2.9470020426898983e-05, |
|
"loss": 0.1599, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4882669746875763, |
|
"learning_rate": 2.946262693951762e-05, |
|
"loss": 0.1059, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2939213812351227, |
|
"learning_rate": 2.9455183177725055e-05, |
|
"loss": 0.0776, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.25689342617988586, |
|
"learning_rate": 2.9447689167397e-05, |
|
"loss": 0.098, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.22836799919605255, |
|
"learning_rate": 2.944014493458383e-05, |
|
"loss": 0.0838, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.525640606880188, |
|
"learning_rate": 2.9432550505510516e-05, |
|
"loss": 0.0958, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.6011547446250916, |
|
"learning_rate": 2.942490590657651e-05, |
|
"loss": 0.1154, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.3123567998409271, |
|
"learning_rate": 2.9417211164355668e-05, |
|
"loss": 0.1097, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.39742252230644226, |
|
"learning_rate": 2.9409466305596135e-05, |
|
"loss": 0.0671, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2927655279636383, |
|
"learning_rate": 2.9401671357220297e-05, |
|
"loss": 0.0752, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.6094531416893005, |
|
"learning_rate": 2.9393826346324634e-05, |
|
"loss": 0.1437, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.23876389861106873, |
|
"learning_rate": 2.9385931300179675e-05, |
|
"loss": 0.1064, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.26306307315826416, |
|
"learning_rate": 2.9377986246229853e-05, |
|
"loss": 0.065, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.3406679332256317, |
|
"learning_rate": 2.9369991212093462e-05, |
|
"loss": 0.0939, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.5959851741790771, |
|
"learning_rate": 2.9361946225562516e-05, |
|
"loss": 0.1163, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3201580047607422, |
|
"learning_rate": 2.9353851314602676e-05, |
|
"loss": 0.115, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.4657009243965149, |
|
"learning_rate": 2.9345706507353158e-05, |
|
"loss": 0.0877, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.313793420791626, |
|
"learning_rate": 2.9337511832126616e-05, |
|
"loss": 0.0574, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2866755723953247, |
|
"learning_rate": 2.9329267317409053e-05, |
|
"loss": 0.0968, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2852758765220642, |
|
"learning_rate": 2.9320972991859728e-05, |
|
"loss": 0.1091, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.3556657135486603, |
|
"learning_rate": 2.9312628884311048e-05, |
|
"loss": 0.0716, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.3235069215297699, |
|
"learning_rate": 2.9304235023768465e-05, |
|
"loss": 0.0532, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.3486980199813843, |
|
"learning_rate": 2.9295791439410387e-05, |
|
"loss": 0.1146, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.319968044757843, |
|
"learning_rate": 2.9287298160588073e-05, |
|
"loss": 0.1007, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.48733624815940857, |
|
"learning_rate": 2.927875521682551e-05, |
|
"loss": 0.1164, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2947496473789215, |
|
"learning_rate": 2.9270162637819352e-05, |
|
"loss": 0.0434, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.35532888770103455, |
|
"learning_rate": 2.9261520453438775e-05, |
|
"loss": 0.0498, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5203955173492432, |
|
"learning_rate": 2.9252828693725404e-05, |
|
"loss": 0.1061, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4153119921684265, |
|
"learning_rate": 2.9244087388893187e-05, |
|
"loss": 0.0953, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.41368868947029114, |
|
"learning_rate": 2.9235296569328303e-05, |
|
"loss": 0.1015, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4907214343547821, |
|
"learning_rate": 2.922645626558905e-05, |
|
"loss": 0.1213, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.2939632833003998, |
|
"learning_rate": 2.921756650840574e-05, |
|
"loss": 0.0757, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.36250534653663635, |
|
"learning_rate": 2.92086273286806e-05, |
|
"loss": 0.0983, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.34401020407676697, |
|
"learning_rate": 2.9199638757487648e-05, |
|
"loss": 0.115, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.24301297962665558, |
|
"learning_rate": 2.9190600826072603e-05, |
|
"loss": 0.0871, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.3000582754611969, |
|
"learning_rate": 2.918151356585276e-05, |
|
"loss": 0.0675, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.23538866639137268, |
|
"learning_rate": 2.9172377008416898e-05, |
|
"loss": 0.0645, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.3890560567378998, |
|
"learning_rate": 2.916319118552515e-05, |
|
"loss": 0.1009, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4634495973587036, |
|
"learning_rate": 2.9153956129108918e-05, |
|
"loss": 0.098, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.20195429027080536, |
|
"learning_rate": 2.9144671871270734e-05, |
|
"loss": 0.0894, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.42528125643730164, |
|
"learning_rate": 2.913533844428417e-05, |
|
"loss": 0.1398, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.36887580156326294, |
|
"learning_rate": 2.912595588059371e-05, |
|
"loss": 0.1066, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.17726579308509827, |
|
"learning_rate": 2.9116524212814653e-05, |
|
"loss": 0.0611, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.19946645200252533, |
|
"learning_rate": 2.9107043473733e-05, |
|
"loss": 0.0947, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3372867703437805, |
|
"learning_rate": 2.9097513696305304e-05, |
|
"loss": 0.0952, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3198525011539459, |
|
"learning_rate": 2.908793491365861e-05, |
|
"loss": 0.1155, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3473115861415863, |
|
"learning_rate": 2.90783071590903e-05, |
|
"loss": 0.0752, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.20476488769054413, |
|
"learning_rate": 2.9068630466067997e-05, |
|
"loss": 0.0708, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.3317318558692932, |
|
"learning_rate": 2.905890486822943e-05, |
|
"loss": 0.111, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.247785285115242, |
|
"learning_rate": 2.9049130399382345e-05, |
|
"loss": 0.0855, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.18749944865703583, |
|
"learning_rate": 2.903930709350436e-05, |
|
"loss": 0.0864, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.43616777658462524, |
|
"learning_rate": 2.9029434984742866e-05, |
|
"loss": 0.1307, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.23449763655662537, |
|
"learning_rate": 2.9019514107414888e-05, |
|
"loss": 0.086, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.3940359055995941, |
|
"learning_rate": 2.9009544496006998e-05, |
|
"loss": 0.0711, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.8495943546295166, |
|
"learning_rate": 2.8999526185175155e-05, |
|
"loss": 0.1159, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.6435235142707825, |
|
"learning_rate": 2.898945920974462e-05, |
|
"loss": 0.0951, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.24877464771270752, |
|
"learning_rate": 2.8979343604709818e-05, |
|
"loss": 0.0472, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.3478896915912628, |
|
"learning_rate": 2.8969179405234202e-05, |
|
"loss": 0.1026, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.4590359032154083, |
|
"learning_rate": 2.8958966646650172e-05, |
|
"loss": 0.1551, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.33144280314445496, |
|
"learning_rate": 2.894870536445891e-05, |
|
"loss": 0.069, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0096515417099, |
|
"learning_rate": 2.893839559433028e-05, |
|
"loss": 0.1919, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.4398234188556671, |
|
"learning_rate": 2.8928037372102698e-05, |
|
"loss": 0.1481, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2270442694425583, |
|
"learning_rate": 2.8917630733783004e-05, |
|
"loss": 0.0811, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.30207884311676025, |
|
"learning_rate": 2.8907175715546337e-05, |
|
"loss": 0.091, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.21270009875297546, |
|
"learning_rate": 2.889667235373603e-05, |
|
"loss": 0.092, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.2912154197692871, |
|
"learning_rate": 2.888612068486344e-05, |
|
"loss": 0.0834, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.23577392101287842, |
|
"learning_rate": 2.887552074560787e-05, |
|
"loss": 0.1215, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.16501927375793457, |
|
"learning_rate": 2.8864872572816407e-05, |
|
"loss": 0.0665, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.1898716688156128, |
|
"learning_rate": 2.885417620350381e-05, |
|
"loss": 0.0595, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.25912415981292725, |
|
"learning_rate": 2.8843431674852366e-05, |
|
"loss": 0.0455, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.33551594614982605, |
|
"learning_rate": 2.883263902421179e-05, |
|
"loss": 0.0836, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2037317305803299, |
|
"learning_rate": 2.8821798289099054e-05, |
|
"loss": 0.0974, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.29482024908065796, |
|
"learning_rate": 2.881090950719831e-05, |
|
"loss": 0.1051, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.20771493017673492, |
|
"learning_rate": 2.87999727163607e-05, |
|
"loss": 0.0761, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2051924616098404, |
|
"learning_rate": 2.878898795460426e-05, |
|
"loss": 0.0553, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2791610062122345, |
|
"learning_rate": 2.8777955260113794e-05, |
|
"loss": 0.0651, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.35246115922927856, |
|
"learning_rate": 2.8766874671240708e-05, |
|
"loss": 0.1042, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.29613715410232544, |
|
"learning_rate": 2.8755746226502914e-05, |
|
"loss": 0.0679, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.5915008783340454, |
|
"learning_rate": 2.874456996458467e-05, |
|
"loss": 0.1112, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.5234435200691223, |
|
"learning_rate": 2.8733345924336448e-05, |
|
"loss": 0.1, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.582992434501648, |
|
"learning_rate": 2.872207414477482e-05, |
|
"loss": 0.1505, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.463219553232193, |
|
"learning_rate": 2.8710754665082295e-05, |
|
"loss": 0.1148, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.3954102694988251, |
|
"learning_rate": 2.8699387524607206e-05, |
|
"loss": 0.1252, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.2828240692615509, |
|
"learning_rate": 2.868797276286355e-05, |
|
"loss": 0.0916, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.6134512424468994, |
|
"learning_rate": 2.8676510419530875e-05, |
|
"loss": 0.0953, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4864056408405304, |
|
"learning_rate": 2.866500053445412e-05, |
|
"loss": 0.0887, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.34841328859329224, |
|
"learning_rate": 2.86534431476435e-05, |
|
"loss": 0.0919, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.22810260951519012, |
|
"learning_rate": 2.8641838299274336e-05, |
|
"loss": 0.0515, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.7178006768226624, |
|
"learning_rate": 2.863018602968695e-05, |
|
"loss": 0.2241, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3194310665130615, |
|
"learning_rate": 2.8618486379386496e-05, |
|
"loss": 0.111, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.15743738412857056, |
|
"learning_rate": 2.8606739389042838e-05, |
|
"loss": 0.0634, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.24189315736293793, |
|
"learning_rate": 2.8594945099490395e-05, |
|
"loss": 0.0644, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.21816010773181915, |
|
"learning_rate": 2.8583103551728008e-05, |
|
"loss": 0.0852, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.282953679561615, |
|
"learning_rate": 2.857121478691881e-05, |
|
"loss": 0.0842, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2257271707057953, |
|
"learning_rate": 2.855927884639004e-05, |
|
"loss": 0.0686, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2596382796764374, |
|
"learning_rate": 2.854729577163294e-05, |
|
"loss": 0.0595, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3601246178150177, |
|
"learning_rate": 2.8535265604302614e-05, |
|
"loss": 0.0781, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.22987039387226105, |
|
"learning_rate": 2.852318838621784e-05, |
|
"loss": 0.0712, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2668680250644684, |
|
"learning_rate": 2.851106415936098e-05, |
|
"loss": 0.1015, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.3502819836139679, |
|
"learning_rate": 2.8498892965877776e-05, |
|
"loss": 0.1164, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4412795603275299, |
|
"learning_rate": 2.848667484807726e-05, |
|
"loss": 0.0891, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.506323516368866, |
|
"learning_rate": 2.8474409848431562e-05, |
|
"loss": 0.1134, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.519180178642273, |
|
"learning_rate": 2.8462098009575793e-05, |
|
"loss": 0.1147, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4656154215335846, |
|
"learning_rate": 2.8449739374307877e-05, |
|
"loss": 0.0939, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.23170435428619385, |
|
"learning_rate": 2.8437333985588418e-05, |
|
"loss": 0.0365, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4948550760746002, |
|
"learning_rate": 2.8424881886540527e-05, |
|
"loss": 0.127, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2533349096775055, |
|
"learning_rate": 2.8412383120449707e-05, |
|
"loss": 0.0582, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.3986698091030121, |
|
"learning_rate": 2.839983773076367e-05, |
|
"loss": 0.0687, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5078080296516418, |
|
"learning_rate": 2.8387245761092203e-05, |
|
"loss": 0.1166, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5207770466804504, |
|
"learning_rate": 2.8374607255207012e-05, |
|
"loss": 0.0615, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.578711211681366, |
|
"learning_rate": 2.8361922257041577e-05, |
|
"loss": 0.1259, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.3096364140510559, |
|
"learning_rate": 2.8349190810690977e-05, |
|
"loss": 0.0708, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4309447407722473, |
|
"learning_rate": 2.8336412960411765e-05, |
|
"loss": 0.1351, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4576749801635742, |
|
"learning_rate": 2.8323588750621802e-05, |
|
"loss": 0.0875, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5308840274810791, |
|
"learning_rate": 2.8310718225900095e-05, |
|
"loss": 0.0803, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.32970359921455383, |
|
"learning_rate": 2.8297801430986652e-05, |
|
"loss": 0.0774, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2413233071565628, |
|
"learning_rate": 2.8284838410782327e-05, |
|
"loss": 0.056, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.45652082562446594, |
|
"learning_rate": 2.8271829210348657e-05, |
|
"loss": 0.165, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.36683741211891174, |
|
"learning_rate": 2.82587738749077e-05, |
|
"loss": 0.1019, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3328053057193756, |
|
"learning_rate": 2.824567244984192e-05, |
|
"loss": 0.1022, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.40834105014801025, |
|
"learning_rate": 2.8232524980693947e-05, |
|
"loss": 0.0607, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2283175140619278, |
|
"learning_rate": 2.8219331513166503e-05, |
|
"loss": 0.0772, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.3196583092212677, |
|
"learning_rate": 2.8206092093122195e-05, |
|
"loss": 0.0834, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.27474430203437805, |
|
"learning_rate": 2.8192806766583373e-05, |
|
"loss": 0.0749, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.3614490330219269, |
|
"learning_rate": 2.8179475579731967e-05, |
|
"loss": 0.0888, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.20857541263103485, |
|
"learning_rate": 2.8166098578909315e-05, |
|
"loss": 0.035, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.27701324224472046, |
|
"learning_rate": 2.815267581061602e-05, |
|
"loss": 0.062, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.45753782987594604, |
|
"learning_rate": 2.8139207321511778e-05, |
|
"loss": 0.1062, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.3005506098270416, |
|
"learning_rate": 2.8125693158415217e-05, |
|
"loss": 0.1344, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.2129545509815216, |
|
"learning_rate": 2.8112133368303737e-05, |
|
"loss": 0.0382, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.37999290227890015, |
|
"learning_rate": 2.809852799831334e-05, |
|
"loss": 0.0812, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1718507707118988, |
|
"learning_rate": 2.8084877095738477e-05, |
|
"loss": 0.022, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.08163367211818695, |
|
"eval_runtime": 14.651, |
|
"eval_samples_per_second": 32.558, |
|
"eval_steps_per_second": 8.191, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.27595949172973633, |
|
"learning_rate": 2.8071180708031874e-05, |
|
"loss": 0.0593, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.48061394691467285, |
|
"learning_rate": 2.8057438882804372e-05, |
|
"loss": 0.1139, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.32916727662086487, |
|
"learning_rate": 2.8043651667824767e-05, |
|
"loss": 0.0335, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6290953755378723, |
|
"learning_rate": 2.8029819111019618e-05, |
|
"loss": 0.099, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.7197291851043701, |
|
"learning_rate": 2.8015941260473117e-05, |
|
"loss": 0.1165, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.4702732264995575, |
|
"learning_rate": 2.8002018164426896e-05, |
|
"loss": 0.0948, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.4018266201019287, |
|
"learning_rate": 2.798804987127988e-05, |
|
"loss": 0.0982, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7587385773658752, |
|
"learning_rate": 2.7974036429588082e-05, |
|
"loss": 0.0915, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.36278435587882996, |
|
"learning_rate": 2.7959977888064484e-05, |
|
"loss": 0.0442, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.5261387228965759, |
|
"learning_rate": 2.7945874295578827e-05, |
|
"loss": 0.117, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8715558648109436, |
|
"learning_rate": 2.7931725701157462e-05, |
|
"loss": 0.1461, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.3017662465572357, |
|
"learning_rate": 2.7917532153983176e-05, |
|
"loss": 0.0829, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.23709601163864136, |
|
"learning_rate": 2.790329370339501e-05, |
|
"loss": 0.0693, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.23073746263980865, |
|
"learning_rate": 2.7889010398888104e-05, |
|
"loss": 0.0657, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.35000571608543396, |
|
"learning_rate": 2.7874682290113514e-05, |
|
"loss": 0.0774, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.2956024408340454, |
|
"learning_rate": 2.786030942687805e-05, |
|
"loss": 0.0836, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.35490185022354126, |
|
"learning_rate": 2.7845891859144088e-05, |
|
"loss": 0.1066, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.5659674406051636, |
|
"learning_rate": 2.7831429637029402e-05, |
|
"loss": 0.1574, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.3070801794528961, |
|
"learning_rate": 2.7816922810807e-05, |
|
"loss": 0.0913, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.3739650547504425, |
|
"learning_rate": 2.7802371430904936e-05, |
|
"loss": 0.1343, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.6166901588439941, |
|
"learning_rate": 2.7787775547906142e-05, |
|
"loss": 0.0998, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.3676758408546448, |
|
"learning_rate": 2.7773135212548247e-05, |
|
"loss": 0.0673, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.4253583550453186, |
|
"learning_rate": 2.7758450475723405e-05, |
|
"loss": 0.1059, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.31137579679489136, |
|
"learning_rate": 2.774372138847812e-05, |
|
"loss": 0.0884, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.23509562015533447, |
|
"learning_rate": 2.7728948002013054e-05, |
|
"loss": 0.0792, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.459160715341568, |
|
"learning_rate": 2.771413036768288e-05, |
|
"loss": 0.1156, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.34745368361473083, |
|
"learning_rate": 2.769926853699606e-05, |
|
"loss": 0.0971, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.30285558104515076, |
|
"learning_rate": 2.7684362561614714e-05, |
|
"loss": 0.0954, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.30963224172592163, |
|
"learning_rate": 2.766941249335439e-05, |
|
"loss": 0.099, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.34839507937431335, |
|
"learning_rate": 2.765441838418393e-05, |
|
"loss": 0.1011, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.23209823668003082, |
|
"learning_rate": 2.7639380286225264e-05, |
|
"loss": 0.0662, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.3385458290576935, |
|
"learning_rate": 2.7624298251753232e-05, |
|
"loss": 0.1058, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2462904453277588, |
|
"learning_rate": 2.7609172333195406e-05, |
|
"loss": 0.1128, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.3830464482307434, |
|
"learning_rate": 2.75940025831319e-05, |
|
"loss": 0.0927, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.23960565030574799, |
|
"learning_rate": 2.7578789054295202e-05, |
|
"loss": 0.0484, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.24503108859062195, |
|
"learning_rate": 2.7563531799569984e-05, |
|
"loss": 0.1001, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.3050799071788788, |
|
"learning_rate": 2.75482308719929e-05, |
|
"loss": 0.1168, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.4061020016670227, |
|
"learning_rate": 2.753288632475244e-05, |
|
"loss": 0.1424, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.30394184589385986, |
|
"learning_rate": 2.75174982111887e-05, |
|
"loss": 0.0774, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.22858074307441711, |
|
"learning_rate": 2.7502066584793243e-05, |
|
"loss": 0.0911, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.31271883845329285, |
|
"learning_rate": 2.7486591499208867e-05, |
|
"loss": 0.0555, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.22895589470863342, |
|
"learning_rate": 2.7471073008229462e-05, |
|
"loss": 0.0741, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.22698134183883667, |
|
"learning_rate": 2.7455511165799783e-05, |
|
"loss": 0.0829, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.24353599548339844, |
|
"learning_rate": 2.7439906026015297e-05, |
|
"loss": 0.0965, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.19678641855716705, |
|
"learning_rate": 2.742425764312197e-05, |
|
"loss": 0.0822, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.21248051524162292, |
|
"learning_rate": 2.7408566071516087e-05, |
|
"loss": 0.0713, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.47786301374435425, |
|
"learning_rate": 2.7392831365744074e-05, |
|
"loss": 0.0863, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.2300809770822525, |
|
"learning_rate": 2.7377053580502298e-05, |
|
"loss": 0.0864, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.34608194231987, |
|
"learning_rate": 2.7361232770636856e-05, |
|
"loss": 0.0869, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.37124645709991455, |
|
"learning_rate": 2.7345368991143433e-05, |
|
"loss": 0.1013, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.4118335545063019, |
|
"learning_rate": 2.732946229716707e-05, |
|
"loss": 0.1163, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.5510608553886414, |
|
"learning_rate": 2.7313512744001982e-05, |
|
"loss": 0.1305, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.21711857616901398, |
|
"learning_rate": 2.7297520387091376e-05, |
|
"loss": 0.0668, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.24401220679283142, |
|
"learning_rate": 2.7281485282027252e-05, |
|
"loss": 0.0855, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.33350592851638794, |
|
"learning_rate": 2.7265407484550206e-05, |
|
"loss": 0.0632, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7973533868789673, |
|
"learning_rate": 2.724928705054924e-05, |
|
"loss": 0.1894, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.4574988782405853, |
|
"learning_rate": 2.7233124036061575e-05, |
|
"loss": 0.105, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2864499092102051, |
|
"learning_rate": 2.7216918497272426e-05, |
|
"loss": 0.0826, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2666662335395813, |
|
"learning_rate": 2.7200670490514865e-05, |
|
"loss": 0.0723, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.27098262310028076, |
|
"learning_rate": 2.7184380072269558e-05, |
|
"loss": 0.0579, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.22073043882846832, |
|
"learning_rate": 2.7168047299164614e-05, |
|
"loss": 0.0539, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2872254550457001, |
|
"learning_rate": 2.7151672227975377e-05, |
|
"loss": 0.1239, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.41940388083457947, |
|
"learning_rate": 2.7135254915624213e-05, |
|
"loss": 0.0755, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.2658102214336395, |
|
"learning_rate": 2.711879541918034e-05, |
|
"loss": 0.1158, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.23337627947330475, |
|
"learning_rate": 2.71022937958596e-05, |
|
"loss": 0.0814, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.26450300216674805, |
|
"learning_rate": 2.7085750103024296e-05, |
|
"loss": 0.0393, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.34251630306243896, |
|
"learning_rate": 2.7069164398182948e-05, |
|
"loss": 0.0718, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.3174653947353363, |
|
"learning_rate": 2.7052536738990125e-05, |
|
"loss": 0.0838, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.2430477887392044, |
|
"learning_rate": 2.7035867183246247e-05, |
|
"loss": 0.1018, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.3248900771141052, |
|
"learning_rate": 2.7019155788897357e-05, |
|
"loss": 0.0764, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.24875646829605103, |
|
"learning_rate": 2.700240261403494e-05, |
|
"loss": 0.0731, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.209304079413414, |
|
"learning_rate": 2.6985607716895727e-05, |
|
"loss": 0.0482, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.24239419400691986, |
|
"learning_rate": 2.6968771155861464e-05, |
|
"loss": 0.0515, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.5304663777351379, |
|
"learning_rate": 2.695189298945875e-05, |
|
"loss": 0.0919, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.21653291583061218, |
|
"learning_rate": 2.6934973276358792e-05, |
|
"loss": 0.0604, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2541111409664154, |
|
"learning_rate": 2.6918012075377226e-05, |
|
"loss": 0.0823, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.3744794428348541, |
|
"learning_rate": 2.6901009445473912e-05, |
|
"loss": 0.1051, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.28307440876960754, |
|
"learning_rate": 2.6883965445752714e-05, |
|
"loss": 0.0425, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.49167400598526, |
|
"learning_rate": 2.6866880135461314e-05, |
|
"loss": 0.1167, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.3999198079109192, |
|
"learning_rate": 2.684975357399099e-05, |
|
"loss": 0.0547, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.23400184512138367, |
|
"learning_rate": 2.683258582087641e-05, |
|
"loss": 0.0545, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1796039193868637, |
|
"learning_rate": 2.681537693579545e-05, |
|
"loss": 0.0346, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.5400248169898987, |
|
"learning_rate": 2.6798126978568942e-05, |
|
"loss": 0.1084, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.3125457167625427, |
|
"learning_rate": 2.6780836009160514e-05, |
|
"loss": 0.0445, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.48360738158226013, |
|
"learning_rate": 2.6763504087676346e-05, |
|
"loss": 0.0612, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9248375296592712, |
|
"learning_rate": 2.674613127436498e-05, |
|
"loss": 0.1119, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.4848339855670929, |
|
"learning_rate": 2.6728717629617093e-05, |
|
"loss": 0.1119, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8126673102378845, |
|
"learning_rate": 2.671126321396532e-05, |
|
"loss": 0.1964, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.3666157126426697, |
|
"learning_rate": 2.6693768088083994e-05, |
|
"loss": 0.0688, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.33919093012809753, |
|
"learning_rate": 2.6676232312788998e-05, |
|
"loss": 0.087, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.4218893349170685, |
|
"learning_rate": 2.6658655949037482e-05, |
|
"loss": 0.0704, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2786632776260376, |
|
"learning_rate": 2.6641039057927724e-05, |
|
"loss": 0.0724, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7727587819099426, |
|
"learning_rate": 2.662338170069885e-05, |
|
"loss": 0.1219, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.47525373101234436, |
|
"learning_rate": 2.6605683938730666e-05, |
|
"loss": 0.0894, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6987584233283997, |
|
"learning_rate": 2.6587945833543432e-05, |
|
"loss": 0.0976, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.28406238555908203, |
|
"learning_rate": 2.6570167446797657e-05, |
|
"loss": 0.1048, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2821677625179291, |
|
"learning_rate": 2.6552348840293856e-05, |
|
"loss": 0.0677, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.37019118666648865, |
|
"learning_rate": 2.6534490075972368e-05, |
|
"loss": 0.0901, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.3454105257987976, |
|
"learning_rate": 2.6516591215913118e-05, |
|
"loss": 0.0956, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2895926535129547, |
|
"learning_rate": 2.6498652322335416e-05, |
|
"loss": 0.0539, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.4975215494632721, |
|
"learning_rate": 2.6480673457597737e-05, |
|
"loss": 0.0979, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6093487739562988, |
|
"learning_rate": 2.646265468419749e-05, |
|
"loss": 0.146, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.3748679459095001, |
|
"learning_rate": 2.6444596064770837e-05, |
|
"loss": 0.131, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.3992125391960144, |
|
"learning_rate": 2.6426497662092424e-05, |
|
"loss": 0.08, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.18994563817977905, |
|
"learning_rate": 2.6408359539075204e-05, |
|
"loss": 0.064, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2519935965538025, |
|
"learning_rate": 2.6390181758770208e-05, |
|
"loss": 0.1035, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1836027204990387, |
|
"learning_rate": 2.6371964384366305e-05, |
|
"loss": 0.0876, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.21602681279182434, |
|
"learning_rate": 2.6353707479190022e-05, |
|
"loss": 0.0764, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.28096359968185425, |
|
"learning_rate": 2.6335411106705283e-05, |
|
"loss": 0.0598, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3226209878921509, |
|
"learning_rate": 2.6317075330513212e-05, |
|
"loss": 0.1071, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3776714503765106, |
|
"learning_rate": 2.6298700214351922e-05, |
|
"loss": 0.0576, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5143963694572449, |
|
"learning_rate": 2.628028582209625e-05, |
|
"loss": 0.0927, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.5534598231315613, |
|
"learning_rate": 2.626183221775758e-05, |
|
"loss": 0.0813, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3283978998661041, |
|
"learning_rate": 2.6243339465483605e-05, |
|
"loss": 0.091, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.26208245754241943, |
|
"learning_rate": 2.6224807629558094e-05, |
|
"loss": 0.0825, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.4244028925895691, |
|
"learning_rate": 2.6206236774400684e-05, |
|
"loss": 0.1077, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.39094752073287964, |
|
"learning_rate": 2.6187626964566644e-05, |
|
"loss": 0.1031, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.4204391539096832, |
|
"learning_rate": 2.6168978264746663e-05, |
|
"loss": 0.1153, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.3522421419620514, |
|
"learning_rate": 2.615029073976661e-05, |
|
"loss": 0.055, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.28039833903312683, |
|
"learning_rate": 2.6131564454587316e-05, |
|
"loss": 0.1058, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.3698963224887848, |
|
"learning_rate": 2.611279947430436e-05, |
|
"loss": 0.0639, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.23384957015514374, |
|
"learning_rate": 2.609399586414782e-05, |
|
"loss": 0.0443, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.24013498425483704, |
|
"learning_rate": 2.607515368948206e-05, |
|
"loss": 0.0607, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6072742342948914, |
|
"learning_rate": 2.60562730158055e-05, |
|
"loss": 0.1062, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5543622970581055, |
|
"learning_rate": 2.6037353908750394e-05, |
|
"loss": 0.108, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2501017153263092, |
|
"learning_rate": 2.601839643408259e-05, |
|
"loss": 0.0445, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.3118091821670532, |
|
"learning_rate": 2.5999400657701314e-05, |
|
"loss": 0.0982, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.424712210893631, |
|
"learning_rate": 2.598036664563893e-05, |
|
"loss": 0.0884, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.18902163207530975, |
|
"learning_rate": 2.596129446406072e-05, |
|
"loss": 0.0453, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.4779305160045624, |
|
"learning_rate": 2.594218417926464e-05, |
|
"loss": 0.1217, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.3665257394313812, |
|
"learning_rate": 2.592303585768111e-05, |
|
"loss": 0.0791, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.22645382583141327, |
|
"learning_rate": 2.590384956587277e-05, |
|
"loss": 0.0384, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.3394840359687805, |
|
"learning_rate": 2.5884625370534242e-05, |
|
"loss": 0.0853, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5719820857048035, |
|
"learning_rate": 2.5865363338491916e-05, |
|
"loss": 0.1058, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5341434478759766, |
|
"learning_rate": 2.5846063536703706e-05, |
|
"loss": 0.0955, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.27542081475257874, |
|
"learning_rate": 2.582672603225882e-05, |
|
"loss": 0.0547, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.4750405550003052, |
|
"learning_rate": 2.5807350892377517e-05, |
|
"loss": 0.152, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.292294979095459, |
|
"learning_rate": 2.5787938184410902e-05, |
|
"loss": 0.0877, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.33273130655288696, |
|
"learning_rate": 2.5768487975840655e-05, |
|
"loss": 0.0721, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.27887821197509766, |
|
"learning_rate": 2.5749000334278825e-05, |
|
"loss": 0.0603, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.35635143518447876, |
|
"learning_rate": 2.572947532746758e-05, |
|
"loss": 0.1011, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5215089321136475, |
|
"learning_rate": 2.570991302327897e-05, |
|
"loss": 0.1295, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"eval_loss": 0.07854457199573517, |
|
"eval_runtime": 14.6749, |
|
"eval_samples_per_second": 32.505, |
|
"eval_steps_per_second": 8.177, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.3576556444168091, |
|
"learning_rate": 2.569031348971471e-05, |
|
"loss": 0.0801, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.36309999227523804, |
|
"learning_rate": 2.5670676794905915e-05, |
|
"loss": 0.0694, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6117097735404968, |
|
"learning_rate": 2.5651003007112892e-05, |
|
"loss": 0.111, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.217342346906662, |
|
"learning_rate": 2.5631292194724883e-05, |
|
"loss": 0.0346, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.4748762249946594, |
|
"learning_rate": 2.561154442625983e-05, |
|
"loss": 0.1427, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.295284241437912, |
|
"learning_rate": 2.559175977036415e-05, |
|
"loss": 0.083, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.29578328132629395, |
|
"learning_rate": 2.5571938295812476e-05, |
|
"loss": 0.06, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.22991441190242767, |
|
"learning_rate": 2.555208007150743e-05, |
|
"loss": 0.092, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.23324204981327057, |
|
"learning_rate": 2.553218516647939e-05, |
|
"loss": 0.0972, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.40380072593688965, |
|
"learning_rate": 2.5512253649886237e-05, |
|
"loss": 0.0752, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.3805236518383026, |
|
"learning_rate": 2.5492285591013118e-05, |
|
"loss": 0.0998, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.22794733941555023, |
|
"learning_rate": 2.5472281059272213e-05, |
|
"loss": 0.0747, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22835515439510345, |
|
"learning_rate": 2.5452240124202477e-05, |
|
"loss": 0.0797, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.27887535095214844, |
|
"learning_rate": 2.5432162855469422e-05, |
|
"loss": 0.0499, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2706398069858551, |
|
"learning_rate": 2.5412049322864847e-05, |
|
"loss": 0.052, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22984421253204346, |
|
"learning_rate": 2.539189959630662e-05, |
|
"loss": 0.0735, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.21597078442573547, |
|
"learning_rate": 2.537171374583843e-05, |
|
"loss": 0.0619, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1989249587059021, |
|
"learning_rate": 2.535149184162952e-05, |
|
"loss": 0.0438, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5477291345596313, |
|
"learning_rate": 2.533123395397448e-05, |
|
"loss": 0.1213, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.2946613132953644, |
|
"learning_rate": 2.5310940153292978e-05, |
|
"loss": 0.0751, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.3890973627567291, |
|
"learning_rate": 2.5290610510129518e-05, |
|
"loss": 0.0904, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.4777461588382721, |
|
"learning_rate": 2.5270245095153198e-05, |
|
"loss": 0.0436, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.3443584442138672, |
|
"learning_rate": 2.524984397915747e-05, |
|
"loss": 0.0327, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.45741167664527893, |
|
"learning_rate": 2.5229407233059887e-05, |
|
"loss": 0.1213, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.3687816858291626, |
|
"learning_rate": 2.5208934927901857e-05, |
|
"loss": 0.1082, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.46452489495277405, |
|
"learning_rate": 2.5188427134848395e-05, |
|
"loss": 0.0841, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.23272164165973663, |
|
"learning_rate": 2.5167883925187878e-05, |
|
"loss": 0.0564, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.48221856355667114, |
|
"learning_rate": 2.51473053703318e-05, |
|
"loss": 0.12, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5240137577056885, |
|
"learning_rate": 2.5126691541814518e-05, |
|
"loss": 0.0835, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.48311519622802734, |
|
"learning_rate": 2.510604251129301e-05, |
|
"loss": 0.1015, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.29986709356307983, |
|
"learning_rate": 2.5085358350546612e-05, |
|
"loss": 0.0827, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.4454290270805359, |
|
"learning_rate": 2.506463913147679e-05, |
|
"loss": 0.1309, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.305890828371048, |
|
"learning_rate": 2.5043884926106873e-05, |
|
"loss": 0.0636, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.25751793384552, |
|
"learning_rate": 2.5023095806581802e-05, |
|
"loss": 0.0857, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.4034629166126251, |
|
"learning_rate": 2.5002271845167896e-05, |
|
"loss": 0.1383, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2389378845691681, |
|
"learning_rate": 2.4981413114252588e-05, |
|
"loss": 0.0411, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.4818887412548065, |
|
"learning_rate": 2.4960519686344168e-05, |
|
"loss": 0.0837, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.32815420627593994, |
|
"learning_rate": 2.4939591634071544e-05, |
|
"loss": 0.0903, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1801011711359024, |
|
"learning_rate": 2.491862903018398e-05, |
|
"loss": 0.0827, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.23406219482421875, |
|
"learning_rate": 2.4897631947550857e-05, |
|
"loss": 0.0887, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.20487089455127716, |
|
"learning_rate": 2.4876600459161397e-05, |
|
"loss": 0.0944, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2795143127441406, |
|
"learning_rate": 2.4855534638124427e-05, |
|
"loss": 0.0721, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2514597475528717, |
|
"learning_rate": 2.4834434557668126e-05, |
|
"loss": 0.0798, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.24707554280757904, |
|
"learning_rate": 2.4813300291139754e-05, |
|
"loss": 0.0875, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2397790551185608, |
|
"learning_rate": 2.4792131912005407e-05, |
|
"loss": 0.0497, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.4203518331050873, |
|
"learning_rate": 2.4770929493849773e-05, |
|
"loss": 0.0789, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.23552100360393524, |
|
"learning_rate": 2.4749693110375856e-05, |
|
"loss": 0.0682, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.19143146276474, |
|
"learning_rate": 2.4728422835404735e-05, |
|
"loss": 0.0779, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2365415096282959, |
|
"learning_rate": 2.4707118742875293e-05, |
|
"loss": 0.0442, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.36645641922950745, |
|
"learning_rate": 2.4685780906843975e-05, |
|
"loss": 0.1043, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.20781370997428894, |
|
"learning_rate": 2.4664409401484522e-05, |
|
"loss": 0.0797, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.27483153343200684, |
|
"learning_rate": 2.4643004301087716e-05, |
|
"loss": 0.0455, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2820053696632385, |
|
"learning_rate": 2.462156568006112e-05, |
|
"loss": 0.0615, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.3784559369087219, |
|
"learning_rate": 2.4600093612928812e-05, |
|
"loss": 0.1338, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.46063724160194397, |
|
"learning_rate": 2.457858817433115e-05, |
|
"loss": 0.0897, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.320568323135376, |
|
"learning_rate": 2.4557049439024488e-05, |
|
"loss": 0.0747, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2996703088283539, |
|
"learning_rate": 2.4535477481880923e-05, |
|
"loss": 0.1219, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.46839097142219543, |
|
"learning_rate": 2.451387237788804e-05, |
|
"loss": 0.1104, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.3005930185317993, |
|
"learning_rate": 2.4492234202148643e-05, |
|
"loss": 0.0755, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2897649109363556, |
|
"learning_rate": 2.44705630298805e-05, |
|
"loss": 0.0798, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2890012860298157, |
|
"learning_rate": 2.4448858936416093e-05, |
|
"loss": 0.1019, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2596171200275421, |
|
"learning_rate": 2.442712199720232e-05, |
|
"loss": 0.0835, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.4037571847438812, |
|
"learning_rate": 2.4405352287800268e-05, |
|
"loss": 0.0822, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.337190181016922, |
|
"learning_rate": 2.4383549883884954e-05, |
|
"loss": 0.0356, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.3917272686958313, |
|
"learning_rate": 2.4361714861245017e-05, |
|
"loss": 0.0966, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5287500023841858, |
|
"learning_rate": 2.4339847295782508e-05, |
|
"loss": 0.0706, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.29515090584754944, |
|
"learning_rate": 2.431794726351258e-05, |
|
"loss": 0.054, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.30174991488456726, |
|
"learning_rate": 2.4296014840563266e-05, |
|
"loss": 0.0768, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.33137279748916626, |
|
"learning_rate": 2.4274050103175195e-05, |
|
"loss": 0.1008, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.19129472970962524, |
|
"learning_rate": 2.42520531277013e-05, |
|
"loss": 0.0556, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3620724081993103, |
|
"learning_rate": 2.423002399060661e-05, |
|
"loss": 0.0604, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.28499534726142883, |
|
"learning_rate": 2.420796276846793e-05, |
|
"loss": 0.067, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3519887328147888, |
|
"learning_rate": 2.4185869537973613e-05, |
|
"loss": 0.0973, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.34655264019966125, |
|
"learning_rate": 2.4163744375923272e-05, |
|
"loss": 0.0425, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5716597437858582, |
|
"learning_rate": 2.4141587359227514e-05, |
|
"loss": 0.1659, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.32810407876968384, |
|
"learning_rate": 2.4119398564907685e-05, |
|
"loss": 0.116, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.32078537344932556, |
|
"learning_rate": 2.4097178070095602e-05, |
|
"loss": 0.0646, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.389475554227829, |
|
"learning_rate": 2.4074925952033263e-05, |
|
"loss": 0.1275, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.15961997210979462, |
|
"learning_rate": 2.4052642288072594e-05, |
|
"loss": 0.0427, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2679911255836487, |
|
"learning_rate": 2.4030327155675192e-05, |
|
"loss": 0.0868, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2590488791465759, |
|
"learning_rate": 2.4007980632412034e-05, |
|
"loss": 0.0476, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.4470207989215851, |
|
"learning_rate": 2.398560279596323e-05, |
|
"loss": 0.0644, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2822036147117615, |
|
"learning_rate": 2.3963193724117715e-05, |
|
"loss": 0.06, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.3097975552082062, |
|
"learning_rate": 2.394075349477302e-05, |
|
"loss": 0.0481, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.30676892399787903, |
|
"learning_rate": 2.3918282185934986e-05, |
|
"loss": 0.0794, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.23650003969669342, |
|
"learning_rate": 2.3895779875717486e-05, |
|
"loss": 0.0653, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2657279968261719, |
|
"learning_rate": 2.3873246642342163e-05, |
|
"loss": 0.093, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.24548004567623138, |
|
"learning_rate": 2.3850682564138145e-05, |
|
"loss": 0.0659, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5615881085395813, |
|
"learning_rate": 2.3828087719541787e-05, |
|
"loss": 0.1278, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.27736374735832214, |
|
"learning_rate": 2.3805462187096402e-05, |
|
"loss": 0.0575, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.24056284129619598, |
|
"learning_rate": 2.3782806045451963e-05, |
|
"loss": 0.0452, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.4982932507991791, |
|
"learning_rate": 2.376011937336485e-05, |
|
"loss": 0.1689, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.29784631729125977, |
|
"learning_rate": 2.373740224969758e-05, |
|
"loss": 0.0938, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.43975648283958435, |
|
"learning_rate": 2.371465475341852e-05, |
|
"loss": 0.1245, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.30927592515945435, |
|
"learning_rate": 2.369187696360161e-05, |
|
"loss": 0.0619, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.36457571387290955, |
|
"learning_rate": 2.3669068959426107e-05, |
|
"loss": 0.0859, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.25598183274269104, |
|
"learning_rate": 2.364623082017629e-05, |
|
"loss": 0.0461, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.44431087374687195, |
|
"learning_rate": 2.3623362625241193e-05, |
|
"loss": 0.1439, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.24602153897285461, |
|
"learning_rate": 2.3600464454114326e-05, |
|
"loss": 0.0408, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2084829956293106, |
|
"learning_rate": 2.3577536386393416e-05, |
|
"loss": 0.0856, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.22140052914619446, |
|
"learning_rate": 2.35545785017801e-05, |
|
"loss": 0.0721, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.24179723858833313, |
|
"learning_rate": 2.3531590880079663e-05, |
|
"loss": 0.0909, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.3095971345901489, |
|
"learning_rate": 2.3508573601200767e-05, |
|
"loss": 0.0804, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.2845352292060852, |
|
"learning_rate": 2.348552674515517e-05, |
|
"loss": 0.0462, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.4917078912258148, |
|
"learning_rate": 2.3462450392057437e-05, |
|
"loss": 0.1184, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.33279237151145935, |
|
"learning_rate": 2.343934462212467e-05, |
|
"loss": 0.0531, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.33190539479255676, |
|
"learning_rate": 2.341620951567624e-05, |
|
"loss": 0.1114, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5710054039955139, |
|
"learning_rate": 2.339304515313348e-05, |
|
"loss": 0.1531, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.20240336656570435, |
|
"learning_rate": 2.3369851615019436e-05, |
|
"loss": 0.0624, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4915199279785156, |
|
"learning_rate": 2.3346628981958565e-05, |
|
"loss": 0.1463, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.29593560099601746, |
|
"learning_rate": 2.332337733467646e-05, |
|
"loss": 0.0789, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1819988489151001, |
|
"learning_rate": 2.3300096753999585e-05, |
|
"loss": 0.0724, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.39005154371261597, |
|
"learning_rate": 2.3276787320854967e-05, |
|
"loss": 0.083, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.4818272590637207, |
|
"learning_rate": 2.3253449116269937e-05, |
|
"loss": 0.0906, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.3471298813819885, |
|
"learning_rate": 2.3230082221371834e-05, |
|
"loss": 0.0952, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2572653591632843, |
|
"learning_rate": 2.3206686717387742e-05, |
|
"loss": 0.115, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.407372385263443, |
|
"learning_rate": 2.3183262685644177e-05, |
|
"loss": 0.1084, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.16921907663345337, |
|
"learning_rate": 2.3159810207566832e-05, |
|
"loss": 0.0694, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2879027724266052, |
|
"learning_rate": 2.3136329364680288e-05, |
|
"loss": 0.0927, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.23834727704524994, |
|
"learning_rate": 2.3112820238607716e-05, |
|
"loss": 0.0772, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.29006141424179077, |
|
"learning_rate": 2.3089282911070613e-05, |
|
"loss": 0.0934, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.4761680066585541, |
|
"learning_rate": 2.3065717463888505e-05, |
|
"loss": 0.1083, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.2773534953594208, |
|
"learning_rate": 2.3042123978978665e-05, |
|
"loss": 0.0802, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.24215322732925415, |
|
"learning_rate": 2.3018502538355827e-05, |
|
"loss": 0.0661, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.21285729110240936, |
|
"learning_rate": 2.2994853224131915e-05, |
|
"loss": 0.0709, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1974431574344635, |
|
"learning_rate": 2.2971176118515734e-05, |
|
"loss": 0.0857, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2114921659231186, |
|
"learning_rate": 2.2947471303812708e-05, |
|
"loss": 0.0715, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.23644877970218658, |
|
"learning_rate": 2.2923738862424565e-05, |
|
"loss": 0.086, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2784081697463989, |
|
"learning_rate": 2.2899978876849084e-05, |
|
"loss": 0.0392, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.20647476613521576, |
|
"learning_rate": 2.287619142967979e-05, |
|
"loss": 0.0479, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.4792695641517639, |
|
"learning_rate": 2.285237660360566e-05, |
|
"loss": 0.1084, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.32381659746170044, |
|
"learning_rate": 2.2828534481410847e-05, |
|
"loss": 0.0916, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5321642160415649, |
|
"learning_rate": 2.28046651459744e-05, |
|
"loss": 0.155, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2742505669593811, |
|
"learning_rate": 2.278076868026995e-05, |
|
"loss": 0.0725, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.40313002467155457, |
|
"learning_rate": 2.2756845167365452e-05, |
|
"loss": 0.0856, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2948872745037079, |
|
"learning_rate": 2.273289469042287e-05, |
|
"loss": 0.0646, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.23548893630504608, |
|
"learning_rate": 2.2708917332697908e-05, |
|
"loss": 0.0451, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.25398510694503784, |
|
"learning_rate": 2.26849131775397e-05, |
|
"loss": 0.0647, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.30981212854385376, |
|
"learning_rate": 2.2660882308390547e-05, |
|
"loss": 0.077, |
|
"step": 565 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1695, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 565, |
|
"total_flos": 5.169945694856806e+16, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|