{
  "best_metric": 28.23428448830723,
  "best_model_checkpoint": "./output/small/yt-special-batch4/checkpoint-5000",
  "epoch": 3.965107057890563,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 4.4e-07, "loss": 1.5065, "step": 25 },
    { "epoch": 0.04, "learning_rate": 9.400000000000001e-07, "loss": 1.1712, "step": 50 },
    { "epoch": 0.06, "learning_rate": 1.44e-06, "loss": 1.3501, "step": 75 },
    { "epoch": 0.08, "learning_rate": 1.94e-06, "loss": 2.3945, "step": 100 },
    { "epoch": 0.1, "learning_rate": 2.42e-06, "loss": 4.8304, "step": 125 },
    { "epoch": 0.12, "learning_rate": 2.92e-06, "loss": 5.0208, "step": 150 },
    { "epoch": 0.14, "learning_rate": 3.4200000000000007e-06, "loss": 8.8304, "step": 175 },
    { "epoch": 0.16, "learning_rate": 3.900000000000001e-06, "loss": 13.7868, "step": 200 },
    { "epoch": 0.18, "learning_rate": 4.4e-06, "loss": 17.6539, "step": 225 },
    { "epoch": 0.2, "learning_rate": 4.880000000000001e-06, "loss": 26.5598, "step": 250 },
    { "epoch": 0.22, "learning_rate": 5.36e-06, "loss": 45.766, "step": 275 },
    { "epoch": 0.24, "learning_rate": 5.86e-06, "loss": 35.7199, "step": 300 },
    { "epoch": 0.26, "learning_rate": 6.360000000000001e-06, "loss": 54.9605, "step": 325 },
    { "epoch": 0.28, "learning_rate": 6.860000000000001e-06, "loss": 55.4295, "step": 350 },
    { "epoch": 0.3, "learning_rate": 7.360000000000001e-06, "loss": 65.0878, "step": 375 },
    { "epoch": 0.32, "learning_rate": 7.860000000000001e-06, "loss": 69.5386, "step": 400 },
    { "epoch": 0.34, "learning_rate": 8.36e-06, "loss": 73.0867, "step": 425 },
    { "epoch": 0.36, "learning_rate": 8.84e-06, "loss": 94.3113, "step": 450 },
    { "epoch": 0.38, "learning_rate": 9.340000000000002e-06, "loss": 87.2808, "step": 475 },
    { "epoch": 0.4, "learning_rate": 9.84e-06, "loss": 98.8404, "step": 500 },
    { "epoch": 0.42, "learning_rate": 9.962222222222224e-06, "loss": 112.2076, "step": 525 },
    { "epoch": 0.44, "learning_rate": 9.906666666666668e-06, "loss": 103.5807, "step": 550 },
    { "epoch": 0.46, "learning_rate": 9.851111111111112e-06, "loss": 120.3094, "step": 575 },
    { "epoch": 0.48, "learning_rate": 9.795555555555556e-06, "loss": 123.5213, "step": 600 },
    { "epoch": 0.5, "learning_rate": 9.74e-06, "loss": 139.2144, "step": 625 },
    { "epoch": 0.52, "learning_rate": 9.684444444444446e-06, "loss": 115.0782, "step": 650 },
    { "epoch": 0.54, "learning_rate": 9.62888888888889e-06, "loss": 130.8006, "step": 675 },
    { "epoch": 0.56, "learning_rate": 9.573333333333334e-06, "loss": 137.604, "step": 700 },
    { "epoch": 0.57, "learning_rate": 9.517777777777778e-06, "loss": 145.0352, "step": 725 },
    { "epoch": 0.59, "learning_rate": 9.466666666666667e-06, "loss": 126.0557, "step": 750 },
    { "epoch": 0.61, "learning_rate": 9.415555555555556e-06, "loss": 105.6157, "step": 775 },
    { "epoch": 0.63, "learning_rate": 9.360000000000002e-06, "loss": 151.1266, "step": 800 },
    { "epoch": 0.65, "learning_rate": 9.304444444444444e-06, "loss": 142.0032, "step": 825 },
    { "epoch": 0.67, "learning_rate": 9.24888888888889e-06, "loss": 139.6682, "step": 850 },
    { "epoch": 0.69, "learning_rate": 9.193333333333334e-06, "loss": 140.1143, "step": 875 },
    { "epoch": 0.71, "learning_rate": 9.137777777777778e-06, "loss": 139.1344, "step": 900 },
    { "epoch": 0.73, "learning_rate": 9.082222222222224e-06, "loss": 133.7512, "step": 925 },
    { "epoch": 0.75, "learning_rate": 9.026666666666666e-06, "loss": 151.7043, "step": 950 },
    { "epoch": 0.77, "learning_rate": 8.971111111111112e-06, "loss": 135.3954, "step": 975 },
    { "epoch": 0.79, "learning_rate": 8.915555555555556e-06, "loss": 125.5137, "step": 1000 },
    { "epoch": 0.79, "eval_loss": 129.90090942382812, "eval_runtime": 2724.157, "eval_samples_per_second": 1.851, "eval_steps_per_second": 0.926, "eval_wer": 149.0002145462347, "step": 1000 },
    { "epoch": 0.81, "learning_rate": 8.860000000000002e-06, "loss": 145.6737, "step": 1025 },
    { "epoch": 0.83, "learning_rate": 8.804444444444446e-06, "loss": 144.7789, "step": 1050 },
    { "epoch": 0.85, "learning_rate": 8.74888888888889e-06, "loss": 131.0249, "step": 1075 },
    { "epoch": 0.87, "learning_rate": 8.693333333333334e-06, "loss": 143.5189, "step": 1100 },
    { "epoch": 0.89, "learning_rate": 8.637777777777778e-06, "loss": 124.5124, "step": 1125 },
    { "epoch": 0.91, "learning_rate": 8.582222222222223e-06, "loss": 119.1228, "step": 1150 },
    { "epoch": 0.93, "learning_rate": 8.526666666666667e-06, "loss": 124.7488, "step": 1175 },
    { "epoch": 0.95, "learning_rate": 8.471111111111112e-06, "loss": 122.202, "step": 1200 },
    { "epoch": 0.97, "learning_rate": 8.415555555555556e-06, "loss": 116.7528, "step": 1225 },
    { "epoch": 0.99, "learning_rate": 8.36e-06, "loss": 131.7794, "step": 1250 },
    { "epoch": 1.01, "learning_rate": 8.304444444444445e-06, "loss": 124.1896, "step": 1275 },
    { "epoch": 1.03, "learning_rate": 8.24888888888889e-06, "loss": 112.4703, "step": 1300 },
    { "epoch": 1.05, "learning_rate": 8.193333333333335e-06, "loss": 114.8219, "step": 1325 },
    { "epoch": 1.07, "learning_rate": 8.137777777777779e-06, "loss": 101.7731, "step": 1350 },
    { "epoch": 1.09, "learning_rate": 8.082222222222223e-06, "loss": 98.6509, "step": 1375 },
    { "epoch": 1.11, "learning_rate": 8.026666666666667e-06, "loss": 104.1523, "step": 1400 },
    { "epoch": 1.13, "learning_rate": 7.971111111111111e-06, "loss": 92.43, "step": 1425 },
    { "epoch": 1.15, "learning_rate": 7.915555555555557e-06, "loss": 94.9851, "step": 1450 },
    { "epoch": 1.17, "learning_rate": 7.860000000000001e-06, "loss": 95.6963, "step": 1475 },
    { "epoch": 1.19, "learning_rate": 7.804444444444445e-06, "loss": 87.4435, "step": 1500 },
    { "epoch": 1.21, "learning_rate": 7.748888888888889e-06, "loss": 89.9063, "step": 1525 },
    { "epoch": 1.23, "learning_rate": 7.693333333333333e-06, "loss": 94.5089, "step": 1550 },
    { "epoch": 1.25, "learning_rate": 7.637777777777779e-06, "loss": 90.6531, "step": 1575 },
    { "epoch": 1.27, "learning_rate": 7.582222222222223e-06, "loss": 78.8492, "step": 1600 },
    { "epoch": 1.29, "learning_rate": 7.526666666666668e-06, "loss": 83.7677, "step": 1625 },
    { "epoch": 1.31, "learning_rate": 7.471111111111111e-06, "loss": 81.6059, "step": 1650 },
    { "epoch": 1.33, "learning_rate": 7.415555555555556e-06, "loss": 80.4736, "step": 1675 },
    { "epoch": 1.35, "learning_rate": 7.360000000000001e-06, "loss": 85.1648, "step": 1700 },
    { "epoch": 1.37, "learning_rate": 7.304444444444445e-06, "loss": 83.8457, "step": 1725 },
    { "epoch": 1.39, "learning_rate": 7.24888888888889e-06, "loss": 85.8395, "step": 1750 },
    { "epoch": 1.41, "learning_rate": 7.1933333333333345e-06, "loss": 78.4769, "step": 1775 },
    { "epoch": 1.43, "learning_rate": 7.137777777777778e-06, "loss": 78.3302, "step": 1800 },
    { "epoch": 1.45, "learning_rate": 7.0822222222222226e-06, "loss": 90.1422, "step": 1825 },
    { "epoch": 1.47, "learning_rate": 7.0266666666666674e-06, "loss": 77.1199, "step": 1850 },
    { "epoch": 1.49, "learning_rate": 6.9711111111111115e-06, "loss": 64.2029, "step": 1875 },
    { "epoch": 1.51, "learning_rate": 6.915555555555556e-06, "loss": 71.5758, "step": 1900 },
    { "epoch": 1.53, "learning_rate": 6.860000000000001e-06, "loss": 70.353, "step": 1925 },
    { "epoch": 1.55, "learning_rate": 6.8044444444444444e-06, "loss": 71.4797, "step": 1950 },
    { "epoch": 1.57, "learning_rate": 6.748888888888889e-06, "loss": 70.792, "step": 1975 },
    { "epoch": 1.59, "learning_rate": 6.693333333333334e-06, "loss": 67.2464, "step": 2000 },
    { "epoch": 1.59, "eval_loss": 59.317176818847656, "eval_runtime": 3546.7342, "eval_samples_per_second": 1.422, "eval_steps_per_second": 0.711, "eval_wer": 298.8671958807123, "step": 2000 },
    { "epoch": 1.61, "learning_rate": 6.637777777777778e-06, "loss": 66.5926, "step": 2025 },
    { "epoch": 1.63, "learning_rate": 6.582222222222223e-06, "loss": 63.0575, "step": 2050 },
    { "epoch": 1.65, "learning_rate": 6.526666666666666e-06, "loss": 64.5554, "step": 2075 },
    { "epoch": 1.67, "learning_rate": 6.471111111111111e-06, "loss": 69.3081, "step": 2100 },
    { "epoch": 1.69, "learning_rate": 6.415555555555556e-06, "loss": 59.7389, "step": 2125 },
    { "epoch": 1.7, "learning_rate": 6.360000000000001e-06, "loss": 62.1109, "step": 2150 },
    { "epoch": 1.72, "learning_rate": 6.304444444444445e-06, "loss": 70.6619, "step": 2175 },
    { "epoch": 1.74, "learning_rate": 6.24888888888889e-06, "loss": 57.8578, "step": 2200 },
    { "epoch": 1.76, "learning_rate": 6.193333333333333e-06, "loss": 62.461, "step": 2225 },
    { "epoch": 1.78, "learning_rate": 6.137777777777778e-06, "loss": 57.0994, "step": 2250 },
    { "epoch": 1.8, "learning_rate": 6.082222222222223e-06, "loss": 62.7162, "step": 2275 },
    { "epoch": 1.82, "learning_rate": 6.026666666666668e-06, "loss": 54.8485, "step": 2300 },
    { "epoch": 1.84, "learning_rate": 5.971111111111112e-06, "loss": 56.2333, "step": 2325 },
    { "epoch": 1.86, "learning_rate": 5.915555555555556e-06, "loss": 56.7746, "step": 2350 },
    { "epoch": 1.88, "learning_rate": 5.86e-06, "loss": 56.3926, "step": 2375 },
    { "epoch": 1.9, "learning_rate": 5.804444444444445e-06, "loss": 55.1382, "step": 2400 },
    { "epoch": 1.92, "learning_rate": 5.7488888888888896e-06, "loss": 57.5388, "step": 2425 },
    { "epoch": 1.94, "learning_rate": 5.6933333333333344e-06, "loss": 49.7817, "step": 2450 },
    { "epoch": 1.96, "learning_rate": 5.6377777777777785e-06, "loss": 51.4976, "step": 2475 },
    { "epoch": 1.98, "learning_rate": 5.5822222222222225e-06, "loss": 59.6299, "step": 2500 },
    { "epoch": 2.0, "learning_rate": 5.5266666666666666e-06, "loss": 55.4231, "step": 2525 },
    { "epoch": 2.02, "learning_rate": 5.4711111111111114e-06, "loss": 47.1009, "step": 2550 },
    { "epoch": 2.04, "learning_rate": 5.415555555555556e-06, "loss": 51.1057, "step": 2575 },
    { "epoch": 2.06, "learning_rate": 5.36e-06, "loss": 41.7412, "step": 2600 },
    { "epoch": 2.08, "learning_rate": 5.304444444444445e-06, "loss": 45.0973, "step": 2625 },
    { "epoch": 2.1, "learning_rate": 5.248888888888889e-06, "loss": 43.8409, "step": 2650 },
    { "epoch": 2.12, "learning_rate": 5.193333333333333e-06, "loss": 40.3858, "step": 2675 },
    { "epoch": 2.14, "learning_rate": 5.137777777777778e-06, "loss": 41.3208, "step": 2700 },
    { "epoch": 2.16, "learning_rate": 5.082222222222223e-06, "loss": 40.5827, "step": 2725 },
    { "epoch": 2.18, "learning_rate": 5.026666666666667e-06, "loss": 34.6682, "step": 2750 },
    { "epoch": 2.2, "learning_rate": 4.971111111111111e-06, "loss": 36.5551, "step": 2775 },
    { "epoch": 2.22, "learning_rate": 4.915555555555556e-06, "loss": 35.1971, "step": 2800 },
    { "epoch": 2.24, "learning_rate": 4.86e-06, "loss": 41.1468, "step": 2825 },
    { "epoch": 2.26, "learning_rate": 4.804444444444445e-06, "loss": 37.0954, "step": 2850 },
    { "epoch": 2.28, "learning_rate": 4.74888888888889e-06, "loss": 36.1367, "step": 2875 },
    { "epoch": 2.3, "learning_rate": 4.693333333333334e-06, "loss": 37.777, "step": 2900 },
    { "epoch": 2.32, "learning_rate": 4.637777777777778e-06, "loss": 36.123, "step": 2925 },
    { "epoch": 2.34, "learning_rate": 4.582222222222223e-06, "loss": 38.9094, "step": 2950 },
    { "epoch": 2.36, "learning_rate": 4.526666666666667e-06, "loss": 34.1404, "step": 2975 },
    { "epoch": 2.38, "learning_rate": 4.471111111111112e-06, "loss": 34.7799, "step": 3000 },
    { "epoch": 2.38, "eval_loss": 28.52435302734375, "eval_runtime": 2528.7422, "eval_samples_per_second": 1.994, "eval_steps_per_second": 0.997, "eval_wer": 134.92598154902382, "step": 3000 },
    { "epoch": 2.4, "learning_rate": 4.415555555555556e-06, "loss": 30.6602, "step": 3025 },
    { "epoch": 2.42, "learning_rate": 4.360000000000001e-06, "loss": 30.5431, "step": 3050 },
    { "epoch": 2.44, "learning_rate": 4.304444444444445e-06, "loss": 31.4769, "step": 3075 },
    { "epoch": 2.46, "learning_rate": 4.248888888888889e-06, "loss": 31.7108, "step": 3100 },
    { "epoch": 2.48, "learning_rate": 4.1933333333333336e-06, "loss": 32.7686, "step": 3125 },
    { "epoch": 2.5, "learning_rate": 4.1377777777777784e-06, "loss": 30.9958, "step": 3150 },
    { "epoch": 2.52, "learning_rate": 4.0822222222222225e-06, "loss": 31.8733, "step": 3175 },
    { "epoch": 2.54, "learning_rate": 4.026666666666667e-06, "loss": 30.6928, "step": 3200 },
    { "epoch": 2.56, "learning_rate": 3.971111111111111e-06, "loss": 32.7095, "step": 3225 },
    { "epoch": 2.58, "learning_rate": 3.9155555555555554e-06, "loss": 29.5839, "step": 3250 },
    { "epoch": 2.6, "learning_rate": 3.86e-06, "loss": 31.0112, "step": 3275 },
    { "epoch": 2.62, "learning_rate": 3.8044444444444443e-06, "loss": 28.1384, "step": 3300 },
    { "epoch": 2.64, "learning_rate": 3.7488888888888892e-06, "loss": 29.8798, "step": 3325 },
    { "epoch": 2.66, "learning_rate": 3.6933333333333337e-06, "loss": 26.3054, "step": 3350 },
    { "epoch": 2.68, "learning_rate": 3.6377777777777777e-06, "loss": 32.3702, "step": 3375 },
    { "epoch": 2.7, "learning_rate": 3.5822222222222226e-06, "loss": 26.3976, "step": 3400 },
    { "epoch": 2.72, "learning_rate": 3.526666666666667e-06, "loss": 28.478, "step": 3425 },
    { "epoch": 2.74, "learning_rate": 3.471111111111111e-06, "loss": 29.9708, "step": 3450 },
    { "epoch": 2.76, "learning_rate": 3.415555555555556e-06, "loss": 28.754, "step": 3475 },
    { "epoch": 2.78, "learning_rate": 3.3600000000000004e-06, "loss": 26.5715, "step": 3500 },
    { "epoch": 2.8, "learning_rate": 3.3044444444444445e-06, "loss": 25.6172, "step": 3525 },
    { "epoch": 2.82, "learning_rate": 3.2488888888888894e-06, "loss": 25.3106, "step": 3550 },
    { "epoch": 2.84, "learning_rate": 3.193333333333334e-06, "loss": 23.8701, "step": 3575 },
    { "epoch": 2.85, "learning_rate": 3.137777777777778e-06, "loss": 24.291, "step": 3600 },
    { "epoch": 2.87, "learning_rate": 3.0822222222222227e-06, "loss": 21.8997, "step": 3625 },
    { "epoch": 2.89, "learning_rate": 3.0266666666666668e-06, "loss": 25.2987, "step": 3650 },
    { "epoch": 2.91, "learning_rate": 2.9711111111111112e-06, "loss": 23.5505, "step": 3675 },
    { "epoch": 2.93, "learning_rate": 2.915555555555556e-06, "loss": 25.5157, "step": 3700 },
    { "epoch": 2.95, "learning_rate": 2.86e-06, "loss": 21.3761, "step": 3725 },
    { "epoch": 2.97, "learning_rate": 2.8044444444444446e-06, "loss": 18.6819, "step": 3750 },
    { "epoch": 2.99, "learning_rate": 2.748888888888889e-06, "loss": 23.4894, "step": 3775 },
    { "epoch": 3.01, "learning_rate": 2.6933333333333335e-06, "loss": 18.3305, "step": 3800 },
    { "epoch": 3.03, "learning_rate": 2.637777777777778e-06, "loss": 14.2677, "step": 3825 },
    { "epoch": 3.05, "learning_rate": 2.5822222222222224e-06, "loss": 16.5562, "step": 3850 },
    { "epoch": 3.07, "learning_rate": 2.526666666666667e-06, "loss": 13.7633, "step": 3875 },
    { "epoch": 3.09, "learning_rate": 2.4711111111111114e-06, "loss": 17.7606, "step": 3900 },
    { "epoch": 3.11, "learning_rate": 2.415555555555556e-06, "loss": 12.0998, "step": 3925 },
    { "epoch": 3.13, "learning_rate": 2.3600000000000003e-06, "loss": 13.1156, "step": 3950 },
    { "epoch": 3.15, "learning_rate": 2.3044444444444447e-06, "loss": 15.2432, "step": 3975 },
    { "epoch": 3.17, "learning_rate": 2.248888888888889e-06, "loss": 13.5007, "step": 4000 },
    { "epoch": 3.17, "eval_loss": 12.516227722167969, "eval_runtime": 2000.0344, "eval_samples_per_second": 2.521, "eval_steps_per_second": 1.261, "eval_wer": 51.14567689337052, "step": 4000 },
    { "epoch": 3.19, "learning_rate": 2.1933333333333332e-06, "loss": 15.0485, "step": 4025 },
    { "epoch": 3.21, "learning_rate": 2.137777777777778e-06, "loss": 10.7393, "step": 4050 },
    { "epoch": 3.23, "learning_rate": 2.0822222222222226e-06, "loss": 11.9499, "step": 4075 },
    { "epoch": 3.25, "learning_rate": 2.0266666666666666e-06, "loss": 13.4629, "step": 4100 },
    { "epoch": 3.27, "learning_rate": 1.971111111111111e-06, "loss": 13.1648, "step": 4125 },
    { "epoch": 3.29, "learning_rate": 1.915555555555556e-06, "loss": 14.3279, "step": 4150 },
    { "epoch": 3.31, "learning_rate": 1.8600000000000002e-06, "loss": 11.6005, "step": 4175 },
    { "epoch": 3.33, "learning_rate": 1.8044444444444444e-06, "loss": 10.6341, "step": 4200 },
    { "epoch": 3.35, "learning_rate": 1.7488888888888891e-06, "loss": 15.247, "step": 4225 },
    { "epoch": 3.37, "learning_rate": 1.6933333333333336e-06, "loss": 10.8269, "step": 4250 },
    { "epoch": 3.39, "learning_rate": 1.6377777777777778e-06, "loss": 13.0179, "step": 4275 },
    { "epoch": 3.41, "learning_rate": 1.5822222222222223e-06, "loss": 9.0973, "step": 4300 },
    { "epoch": 3.43, "learning_rate": 1.526666666666667e-06, "loss": 11.6001, "step": 4325 },
    { "epoch": 3.45, "learning_rate": 1.4711111111111112e-06, "loss": 10.1239, "step": 4350 },
    { "epoch": 3.47, "learning_rate": 1.4155555555555556e-06, "loss": 11.6063, "step": 4375 },
    { "epoch": 3.49, "learning_rate": 1.3600000000000001e-06, "loss": 10.262, "step": 4400 },
    { "epoch": 3.51, "learning_rate": 1.3044444444444446e-06, "loss": 12.105, "step": 4425 },
    { "epoch": 3.53, "learning_rate": 1.248888888888889e-06, "loss": 9.206, "step": 4450 },
    { "epoch": 3.55, "learning_rate": 1.1933333333333335e-06, "loss": 9.362, "step": 4475 },
    { "epoch": 3.57, "learning_rate": 1.137777777777778e-06, "loss": 8.732, "step": 4500 },
    { "epoch": 3.59, "learning_rate": 1.0822222222222222e-06, "loss": 8.9712, "step": 4525 },
    { "epoch": 3.61, "learning_rate": 1.0266666666666669e-06, "loss": 9.5471, "step": 4550 },
    { "epoch": 3.63, "learning_rate": 9.711111111111111e-07, "loss": 8.8118, "step": 4575 },
    { "epoch": 3.65, "learning_rate": 9.155555555555557e-07, "loss": 7.7686, "step": 4600 },
    { "epoch": 3.67, "learning_rate": 8.6e-07, "loss": 9.4581, "step": 4625 },
    { "epoch": 3.69, "learning_rate": 8.044444444444445e-07, "loss": 7.6693, "step": 4650 },
    { "epoch": 3.71, "learning_rate": 7.48888888888889e-07, "loss": 7.7522, "step": 4675 },
    { "epoch": 3.73, "learning_rate": 6.933333333333334e-07, "loss": 7.9061, "step": 4700 },
    { "epoch": 3.75, "learning_rate": 6.377777777777779e-07, "loss": 6.7074, "step": 4725 },
    { "epoch": 3.77, "learning_rate": 5.822222222222223e-07, "loss": 10.0562, "step": 4750 },
    { "epoch": 3.79, "learning_rate": 5.266666666666667e-07, "loss": 8.842, "step": 4775 },
    { "epoch": 3.81, "learning_rate": 4.7111111111111113e-07, "loss": 6.8116, "step": 4800 },
    { "epoch": 3.83, "learning_rate": 4.155555555555556e-07, "loss": 6.9662, "step": 4825 },
    { "epoch": 3.85, "learning_rate": 3.6e-07, "loss": 7.8678, "step": 4850 },
    { "epoch": 3.87, "learning_rate": 3.0444444444444445e-07, "loss": 6.2642, "step": 4875 },
    { "epoch": 3.89, "learning_rate": 2.488888888888889e-07, "loss": 5.2553, "step": 4900 },
    { "epoch": 3.91, "learning_rate": 1.9333333333333337e-07, "loss": 6.6561, "step": 4925 },
    { "epoch": 3.93, "learning_rate": 1.3777777777777778e-07, "loss": 7.6037, "step": 4950 },
    { "epoch": 3.95, "learning_rate": 8.222222222222223e-08, "loss": 6.5388, "step": 4975 },
    { "epoch": 3.97, "learning_rate": 2.6666666666666667e-08, "loss": 7.3781, "step": 5000 },
    { "epoch": 3.97, "eval_loss": 3.6843667030334473, "eval_runtime": 1981.4795, "eval_samples_per_second": 2.545, "eval_steps_per_second": 1.273, "eval_wer": 28.23428448830723, "step": 5000 },
    { "epoch": 3.97, "step": 5000, "total_flos": 5.77084225019904e+18, "train_loss": 53.45060002822876, "train_runtime": 17302.3407, "train_samples_per_second": 1.156, "train_steps_per_second": 0.289 }
  ],
  "max_steps": 5000,
  "num_train_epochs": 4,
  "total_flos": 5.77084225019904e+18,
  "trial_name": null,
  "trial_params": null
}