{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 29.734513274336283,
  "global_step": 1680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.18, "learning_rate": 0.000999929189777604, "loss": 4.7362, "step": 10 },
    { "epoch": 0.35, "learning_rate": 0.0009996844394494005, "loss": 4.2612, "step": 20 },
    { "epoch": 0.53, "learning_rate": 0.0009992649603785806, "loss": 4.2216, "step": 30 },
    { "epoch": 0.71, "learning_rate": 0.0009986708992478202, "loss": 4.1602, "step": 40 },
    { "epoch": 0.88, "learning_rate": 0.0009979024637873308, "loss": 4.0982, "step": 50 },
    { "epoch": 1.06, "learning_rate": 0.0009969599227022219, "loss": 4.0312, "step": 60 },
    { "epoch": 1.24, "learning_rate": 0.000995843605578539, "loss": 3.7638, "step": 70 },
    { "epoch": 1.42, "learning_rate": 0.0009945539027680166, "loss": 3.7872, "step": 80 },
    { "epoch": 1.59, "learning_rate": 0.0009930912652515787, "loss": 3.7943, "step": 90 },
    { "epoch": 1.77, "learning_rate": 0.0009914562044816423, "loss": 3.753, "step": 100 },
    { "epoch": 1.95, "learning_rate": 0.0009900243933150229, "loss": 3.7757, "step": 110 },
    { "epoch": 2.12, "learning_rate": 0.000988080451875917, "loss": 3.4788, "step": 120 },
    { "epoch": 2.3, "learning_rate": 0.0009859658393553863, "loss": 3.2514, "step": 130 },
    { "epoch": 2.48, "learning_rate": 0.0009836812951872773, "loss": 3.3447, "step": 140 },
    { "epoch": 2.65, "learning_rate": 0.0009812276182268236, "loss": 3.4213, "step": 150 },
    { "epoch": 2.83, "learning_rate": 0.000978605666471306, "loss": 3.3397, "step": 160 },
    { "epoch": 3.01, "learning_rate": 0.0009758163567600282, "loss": 3.3895, "step": 170 },
    { "epoch": 3.19, "learning_rate": 0.0009728606644537177, "loss": 2.8609, "step": 180 },
    { "epoch": 3.36, "learning_rate": 0.0009697396230934648, "loss": 2.775, "step": 190 },
    { "epoch": 3.54, "learning_rate": 0.0009664543240393149, "loss": 2.9453, "step": 200 },
    { "epoch": 3.72, "learning_rate": 0.0009630059160886439, "loss": 2.9014, "step": 210 },
    { "epoch": 3.89, "learning_rate": 0.0009593956050744492, "loss": 3.0505, "step": 220 },
    { "epoch": 4.07, "learning_rate": 0.0009556246534436953, "loss": 2.6717, "step": 230 },
    { "epoch": 4.25, "learning_rate": 0.0009516943798158648, "loss": 2.5435, "step": 240 },
    { "epoch": 4.42, "learning_rate": 0.0009476061585218652, "loss": 2.4999, "step": 250 },
    { "epoch": 4.6, "learning_rate": 0.0009433614191234554, "loss": 2.4868, "step": 260 },
    { "epoch": 4.78, "learning_rate": 0.0009389616459133597, "loss": 2.4924, "step": 270 },
    { "epoch": 4.96, "learning_rate": 0.0009344083773962423, "loss": 2.6032, "step": 280 },
    { "epoch": 5.13, "learning_rate": 0.0009297032057507264, "loss": 2.1659, "step": 290 },
    { "epoch": 5.31, "learning_rate": 0.0009248477762726437, "loss": 2.222, "step": 300 },
    { "epoch": 5.49, "learning_rate": 0.0009198437867997105, "loss": 2.0641, "step": 310 },
    { "epoch": 5.66, "learning_rate": 0.0009146929871178311, "loss": 2.2005, "step": 320 },
    { "epoch": 5.84, "learning_rate": 0.0009093971783492354, "loss": 2.2747, "step": 330 },
    { "epoch": 6.02, "learning_rate": 0.000903958212322666, "loss": 2.2078, "step": 340 },
    { "epoch": 6.19, "learning_rate": 0.0008983779909258347, "loss": 1.7158, "step": 350 },
    { "epoch": 6.37, "learning_rate": 0.0008926584654403724, "loss": 1.7397, "step": 360 },
    { "epoch": 6.55, "learning_rate": 0.0008868016358595103, "loss": 2.0346, "step": 370 },
    { "epoch": 6.73, "learning_rate": 0.0008808095501887243, "loss": 1.8981, "step": 380 },
    { "epoch": 6.9, "learning_rate": 0.0008746843037295936, "loss": 1.9037, "step": 390 },
    { "epoch": 7.08, "learning_rate": 0.0008684280383471181, "loss": 1.7414, "step": 400 },
    { "epoch": 7.26, "learning_rate": 0.0008620429417207562, "loss": 1.5215, "step": 410 },
    { "epoch": 7.43, "learning_rate": 0.0008555312465794402, "loss": 1.5522, "step": 420 },
    { "epoch": 7.61, "learning_rate": 0.0008488952299208401, "loss": 1.5898, "step": 430 },
    { "epoch": 7.79, "learning_rate": 0.0008421372122151469, "loss": 1.692, "step": 440 },
    { "epoch": 7.96, "learning_rate": 0.0008352595565936554, "loss": 1.6656, "step": 450 },
    { "epoch": 8.14, "learning_rate": 0.0008282646680224282, "loss": 1.3681, "step": 460 },
    { "epoch": 8.32, "learning_rate": 0.0008211549924613316, "loss": 1.323, "step": 470 },
    { "epoch": 8.5, "learning_rate": 0.0008139330160087374, "loss": 1.301, "step": 480 },
    { "epoch": 8.67, "learning_rate": 0.0008066012640321876, "loss": 1.4134, "step": 490 },
    { "epoch": 8.85, "learning_rate": 0.0007991623002853296, "loss": 1.4661, "step": 500 },
    { "epoch": 9.03, "learning_rate": 0.0007916187260114262, "loss": 1.4292, "step": 510 },
    { "epoch": 9.2, "learning_rate": 0.0007839731790337593, "loss": 0.8971, "step": 520 },
    { "epoch": 9.38, "learning_rate": 0.0007762283328332388, "loss": 1.2906, "step": 530 },
    { "epoch": 9.56, "learning_rate": 0.000768386895613546, "loss": 1.2389, "step": 540 },
    { "epoch": 9.73, "learning_rate": 0.0007604516093541333, "loss": 1.2767, "step": 550 },
    { "epoch": 9.91, "learning_rate": 0.0007524252488514144, "loss": 1.1868, "step": 560 },
    { "epoch": 10.09, "learning_rate": 0.0007443106207484776, "loss": 1.0139, "step": 570 },
    { "epoch": 10.27, "learning_rate": 0.0007361105625536646, "loss": 1.021, "step": 580 },
    { "epoch": 10.44, "learning_rate": 0.0007278279416483561, "loss": 0.9439, "step": 590 },
    { "epoch": 10.62, "learning_rate": 0.0007194656542843102, "loss": 0.9772, "step": 600 },
    { "epoch": 10.8, "learning_rate": 0.0007110266245709072, "loss": 1.0697, "step": 610 },
    { "epoch": 10.97, "learning_rate": 0.0007025138034526517, "loss": 1.1471, "step": 620 },
    { "epoch": 11.15, "learning_rate": 0.0006939301676772927, "loss": 0.8037, "step": 630 },
    { "epoch": 11.33, "learning_rate": 0.0006852787187549182, "loss": 0.8488, "step": 640 },
    { "epoch": 11.5, "learning_rate": 0.0006765624819083944, "loss": 0.8035, "step": 650 },
    { "epoch": 11.68, "learning_rate": 0.0006677845050155106, "loss": 0.8825, "step": 660 },
    { "epoch": 11.86, "learning_rate": 0.0006589478575432023, "loss": 0.8639, "step": 670 },
    { "epoch": 12.04, "learning_rate": 0.0006500556294742265, "loss": 0.9365, "step": 680 },
    { "epoch": 12.21, "learning_rate": 0.0006411109302266615, "loss": 0.704, "step": 690 },
    { "epoch": 12.39, "learning_rate": 0.0006321168875666118, "loss": 0.7477, "step": 700 },
    { "epoch": 12.57, "learning_rate": 0.0006230766465144965, "loss": 0.6058, "step": 710 },
    { "epoch": 12.74, "learning_rate": 0.0006139933682453035, "loss": 0.809, "step": 720 },
    { "epoch": 12.92, "learning_rate": 0.0006048702289831964, "loss": 0.8475, "step": 730 },
    { "epoch": 13.1, "learning_rate": 0.0005957104188908586, "loss": 0.6654, "step": 740 },
    { "epoch": 13.27, "learning_rate": 0.0005865171409539613, "loss": 0.6185, "step": 750 },
    { "epoch": 13.45, "learning_rate": 0.0005772936098611519, "loss": 0.4949, "step": 760 },
    { "epoch": 13.63, "learning_rate": 0.0005680430508799453, "loss": 0.5332, "step": 770 },
    { "epoch": 13.81, "learning_rate": 0.0005587686987289189, "loss": 0.6924, "step": 780 },
    { "epoch": 13.98, "learning_rate": 0.0005494737964466028, "loss": 0.6796, "step": 790 },
    { "epoch": 14.16, "learning_rate": 0.0005401615942574595, "loss": 0.5147, "step": 800 },
    { "epoch": 14.34, "learning_rate": 0.0005308353484353508, "loss": 0.4158, "step": 810 },
    { "epoch": 14.51, "learning_rate": 0.0005214983201648908, "loss": 0.5493, "step": 820 },
    { "epoch": 14.69, "learning_rate": 0.0005121537744010808, "loss": 0.5508, "step": 830 },
    { "epoch": 14.87, "learning_rate": 0.0005028049787276249, "loss": 0.513, "step": 840 },
    { "epoch": 15.04, "learning_rate": 0.0004934552022143279, "loss": 0.5027, "step": 850 },
    { "epoch": 15.22, "learning_rate": 0.00048410771427397304, "loss": 0.3954, "step": 860 },
    { "epoch": 15.4, "learning_rate": 0.0004747657835190795, "loss": 0.4376, "step": 870 },
    { "epoch": 15.58, "learning_rate": 0.0004654326766189404, "loss": 0.4182, "step": 880 },
    { "epoch": 15.75, "learning_rate": 0.0004561116571573388, "loss": 0.4889, "step": 890 },
    { "epoch": 15.93, "learning_rate": 0.00044680598449134434, "loss": 0.3596, "step": 900 },
    { "epoch": 16.11, "learning_rate": 0.00043751891261158783, "loss": 0.3129, "step": 910 },
    { "epoch": 16.28, "learning_rate": 0.0004282536890044104, "loss": 0.3088, "step": 920 },
    { "epoch": 16.46, "learning_rate": 0.0004190135535162894, "loss": 0.3212, "step": 930 },
    { "epoch": 16.64, "learning_rate": 0.00040980173722093496, "loss": 0.4326, "step": 940 },
    { "epoch": 16.81, "learning_rate": 0.00040062146128945275, "loss": 0.3264, "step": 950 },
    { "epoch": 16.99, "learning_rate": 0.0003914759358639719, "loss": 0.3636, "step": 960 },
    { "epoch": 17.17, "learning_rate": 0.0003823683589351284, "loss": 0.2175, "step": 970 },
    { "epoch": 17.35, "learning_rate": 0.0003733019152237965, "loss": 0.208, "step": 980 },
    { "epoch": 17.52, "learning_rate": 0.0003642797750674629, "loss": 0.2717, "step": 990 },
    { "epoch": 17.7, "learning_rate": 0.0003553050933116273, "loss": 0.329, "step": 1000 },
    { "epoch": 17.88, "learning_rate": 0.0003463810082066231, "loss": 0.33, "step": 1010 },
    { "epoch": 18.05, "learning_rate": 0.00033751064031023887, "loss": 0.2679, "step": 1020 },
    { "epoch": 18.23, "learning_rate": 0.0003286970913965275, "loss": 0.224, "step": 1030 },
    { "epoch": 18.41, "learning_rate": 0.00031994344337118454, "loss": 0.2048, "step": 1040 },
    { "epoch": 18.58, "learning_rate": 0.0003112527571938717, "loss": 0.2078, "step": 1050 },
    { "epoch": 18.76, "learning_rate": 0.00030262807180786645, "loss": 0.2303, "step": 1060 },
    { "epoch": 18.94, "learning_rate": 0.0002940724030774104, "loss": 0.2142, "step": 1070 },
    { "epoch": 19.12, "learning_rate": 0.0002855887427331267, "loss": 0.2067, "step": 1080 },
    { "epoch": 19.29, "learning_rate": 0.0002771800573258781, "loss": 0.1458, "step": 1090 },
    { "epoch": 19.47, "learning_rate": 0.0002688492871894276, "loss": 0.1774, "step": 1100 },
    { "epoch": 19.65, "learning_rate": 0.0002605993454122687, "loss": 0.1741, "step": 1110 },
    { "epoch": 19.82, "learning_rate": 0.0002524331168189817, "loss": 0.1883, "step": 1120 },
    { "epoch": 20.0, "learning_rate": 0.00024435345696147403, "loss": 0.1689, "step": 1130 },
    { "epoch": 20.18, "learning_rate": 0.00023636319112045495, "loss": 0.1238, "step": 1140 },
    { "epoch": 20.35, "learning_rate": 0.00022846511331749792, "loss": 0.1372, "step": 1150 },
    { "epoch": 20.53, "learning_rate": 0.00022066198533803106, "loss": 0.1298, "step": 1160 },
    { "epoch": 20.71, "learning_rate": 0.00021295653576560165, "loss": 0.1445, "step": 1170 },
    { "epoch": 20.88, "learning_rate": 0.00020535145902774978, "loss": 0.1454, "step": 1180 },
    { "epoch": 21.06, "learning_rate": 0.00019784941445382642, "loss": 0.1417, "step": 1190 },
    { "epoch": 21.24, "learning_rate": 0.00019045302534508295, "loss": 0.1099, "step": 1200 },
    { "epoch": 21.42, "learning_rate": 0.0001831648780573612, "loss": 0.0922, "step": 1210 },
    { "epoch": 21.59, "learning_rate": 0.00017598752109670014, "loss": 0.1258, "step": 1220 },
    { "epoch": 21.77, "learning_rate": 0.00016892346422817944, "loss": 0.123, "step": 1230 },
    { "epoch": 21.95, "learning_rate": 0.00016197517759830948, "loss": 0.1047, "step": 1240 },
    { "epoch": 22.12, "learning_rate": 0.00015514509087127522, "loss": 0.0991, "step": 1250 },
    { "epoch": 22.3, "learning_rate": 0.00014843559237933475, "loss": 0.1057, "step": 1260 },
    { "epoch": 22.48, "learning_rate": 0.00014184902828767287, "loss": 0.0796, "step": 1270 },
    { "epoch": 22.65, "learning_rate": 0.0001353877017739974, "loss": 0.0747, "step": 1280 },
    { "epoch": 22.83, "learning_rate": 0.00012905387222316822, "loss": 0.0807, "step": 1290 },
    { "epoch": 23.01, "learning_rate": 0.00012284975443714076, "loss": 0.1032, "step": 1300 },
    { "epoch": 23.19, "learning_rate": 0.00011677751786049661, "loss": 0.0743, "step": 1310 },
    { "epoch": 23.36, "learning_rate": 0.0001108392858218371, "loss": 0.0823, "step": 1320 },
    { "epoch": 23.54, "learning_rate": 0.00010503713479130123, "loss": 0.0758, "step": 1330 },
    { "epoch": 23.72, "learning_rate": 9.937309365446973e-05, "loss": 0.0642, "step": 1340 },
    { "epoch": 23.89, "learning_rate": 9.384914300290748e-05, "loss": 0.0791, "step": 1350 },
    { "epoch": 24.07, "learning_rate": 8.846721444159461e-05, "loss": 0.0595, "step": 1360 },
    { "epoch": 24.25, "learning_rate": 8.32291899134856e-05, "loss": 0.0676, "step": 1370 },
    { "epoch": 24.42, "learning_rate": 7.813690104143555e-05, "loss": 0.0611, "step": 1380 },
    { "epoch": 24.6, "learning_rate": 7.319212848772117e-05, "loss": 0.0642, "step": 1390 },
    { "epoch": 24.78, "learning_rate": 6.83966013313826e-05, "loss": 0.0602, "step": 1400 },
    { "epoch": 24.96, "learning_rate": 6.375199646360142e-05, "loss": 0.066, "step": 1410 },
    { "epoch": 25.13, "learning_rate": 5.9259938001329336e-05, "loss": 0.0581, "step": 1420 },
    { "epoch": 25.31, "learning_rate": 5.492199671936915e-05, "loss": 0.0527, "step": 1430 },
    { "epoch": 25.49, "learning_rate": 5.073968950110941e-05, "loss": 0.0634, "step": 1440 },
    { "epoch": 25.66, "learning_rate": 4.6714478808103314e-05, "loss": 0.0545, "step": 1450 },
    { "epoch": 25.84, "learning_rate": 4.2847772168678246e-05, "loss": 0.0549, "step": 1460 },
    { "epoch": 26.02, "learning_rate": 3.9140921685753064e-05, "loss": 0.0529, "step": 1470 },
    { "epoch": 26.19, "learning_rate": 3.559522356403788e-05, "loss": 0.0558, "step": 1480 },
    { "epoch": 26.37, "learning_rate": 3.221191765677928e-05, "loss": 0.0529, "step": 1490 },
    { "epoch": 26.55, "learning_rate": 2.8992187032210516e-05, "loss": 0.0552, "step": 1500 },
    { "epoch": 26.73, "learning_rate": 2.5937157559859093e-05, "loss": 0.0499, "step": 1510 },
    { "epoch": 26.9, "learning_rate": 2.3047897516853767e-05, "loss": 0.0451, "step": 1520 },
    { "epoch": 27.08, "learning_rate": 2.032541721437209e-05, "loss": 0.0511, "step": 1530 },
    { "epoch": 27.26, "learning_rate": 1.777066864435628e-05, "loss": 0.0491, "step": 1540 },
    { "epoch": 27.43, "learning_rate": 1.538454514662285e-05, "loss": 0.045, "step": 1550 },
    { "epoch": 27.61, "learning_rate": 1.3167881096480372e-05, "loss": 0.0594, "step": 1560 },
    { "epoch": 27.79, "learning_rate": 1.1121451612967626e-05, "loss": 0.0456, "step": 1570 },
    { "epoch": 27.96, "learning_rate": 9.245972287810544e-06, "loss": 0.0444, "step": 1580 },
    { "epoch": 28.14, "learning_rate": 7.542098935195918e-06, "loss": 0.0615, "step": 1590 },
    { "epoch": 28.32, "learning_rate": 6.010427362447224e-06, "loss": 0.0456, "step": 1600 },
    { "epoch": 28.5, "learning_rate": 4.6514931616840066e-06, "loss": 0.0445, "step": 1610 },
    { "epoch": 28.67, "learning_rate": 3.4657715225368535e-06, "loss": 0.0494, "step": 1620 },
    { "epoch": 28.85, "learning_rate": 2.453677065984039e-06, "loss": 0.0411, "step": 1630 },
    { "epoch": 29.03, "learning_rate": 1.615563699367495e-06, "loss": 0.0558, "step": 1640 },
    { "epoch": 29.2, "learning_rate": 9.517244926393609e-07, "loss": 0.0457, "step": 1650 },
    { "epoch": 29.38, "learning_rate": 4.623915758816266e-07, "loss": 0.0524, "step": 1660 },
    { "epoch": 29.56, "learning_rate": 1.4773605813522827e-07, "loss": 0.0454, "step": 1670 },
    { "epoch": 29.73, "learning_rate": 7.867967567354306e-09, "loss": 0.055, "step": 1680 },
    {
      "epoch": 29.73,
      "step": 1680,
      "total_flos": 2.133691717066752e+17,
      "train_loss": 1.0390962482385693,
      "train_runtime": 7981.5401,
      "train_samples_per_second": 3.39,
      "train_steps_per_second": 0.21
    }
  ],
  "max_steps": 1680,
  "num_train_epochs": 30,
  "total_flos": 2.133691717066752e+17,
  "trial_name": null,
  "trial_params": null
}