{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 2391,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012547051442910916,
      "grad_norm": 0.716044008731842,
      "learning_rate": 1.9924717691342537e-05,
      "loss": 1.0972,
      "step": 10
    },
    {
      "epoch": 0.025094102885821833,
      "grad_norm": 1.4613193273544312,
      "learning_rate": 1.9841070681723128e-05,
      "loss": 1.088,
      "step": 20
    },
    {
      "epoch": 0.037641154328732745,
      "grad_norm": 0.627024233341217,
      "learning_rate": 1.9757423672103725e-05,
      "loss": 1.0716,
      "step": 30
    },
    {
      "epoch": 0.050188205771643665,
      "grad_norm": 1.224279522895813,
      "learning_rate": 1.9673776662484318e-05,
      "loss": 1.0661,
      "step": 40
    },
    {
      "epoch": 0.06273525721455459,
      "grad_norm": 1.368181586265564,
      "learning_rate": 1.959012965286491e-05,
      "loss": 1.035,
      "step": 50
    },
    {
      "epoch": 0.07528230865746549,
      "grad_norm": 1.0884851217269897,
      "learning_rate": 1.9506482643245505e-05,
      "loss": 1.0288,
      "step": 60
    },
    {
      "epoch": 0.08782936010037641,
      "grad_norm": 1.9844820499420166,
      "learning_rate": 1.9422835633626102e-05,
      "loss": 0.9855,
      "step": 70
    },
    {
      "epoch": 0.10037641154328733,
      "grad_norm": 1.6572856903076172,
      "learning_rate": 1.9339188624006692e-05,
      "loss": 0.991,
      "step": 80
    },
    {
      "epoch": 0.11292346298619825,
      "grad_norm": 2.2602856159210205,
      "learning_rate": 1.925554161438729e-05,
      "loss": 1.0092,
      "step": 90
    },
    {
      "epoch": 0.12547051442910917,
      "grad_norm": 4.111606597900391,
      "learning_rate": 1.9171894604767882e-05,
      "loss": 0.9062,
      "step": 100
    },
    {
      "epoch": 0.13801756587202008,
      "grad_norm": 3.383659601211548,
      "learning_rate": 1.9088247595148476e-05,
      "loss": 0.9749,
      "step": 110
    },
    {
      "epoch": 0.15056461731493098,
      "grad_norm": 1.9777194261550903,
      "learning_rate": 1.900460058552907e-05,
      "loss": 0.9193,
      "step": 120
    },
    {
      "epoch": 0.16311166875784192,
      "grad_norm": 6.869212627410889,
      "learning_rate": 1.8920953575909663e-05,
      "loss": 0.9695,
      "step": 130
    },
    {
      "epoch": 0.17565872020075282,
      "grad_norm": 2.800828695297241,
      "learning_rate": 1.8837306566290257e-05,
      "loss": 0.9375,
      "step": 140
    },
    {
      "epoch": 0.18820577164366373,
      "grad_norm": 3.783500909805298,
      "learning_rate": 1.875365955667085e-05,
      "loss": 0.9294,
      "step": 150
    },
    {
      "epoch": 0.20075282308657466,
      "grad_norm": 2.961916208267212,
      "learning_rate": 1.8670012547051444e-05,
      "loss": 0.9651,
      "step": 160
    },
    {
      "epoch": 0.21329987452948557,
      "grad_norm": 4.599372386932373,
      "learning_rate": 1.8586365537432037e-05,
      "loss": 0.9706,
      "step": 170
    },
    {
      "epoch": 0.2258469259723965,
      "grad_norm": 3.3036141395568848,
      "learning_rate": 1.8502718527812634e-05,
      "loss": 0.9289,
      "step": 180
    },
    {
      "epoch": 0.2383939774153074,
      "grad_norm": 6.022705078125,
      "learning_rate": 1.8419071518193224e-05,
      "loss": 0.9015,
      "step": 190
    },
    {
      "epoch": 0.25094102885821834,
      "grad_norm": 13.482539176940918,
      "learning_rate": 1.833542450857382e-05,
      "loss": 0.9033,
      "step": 200
    },
    {
      "epoch": 0.26348808030112925,
      "grad_norm": 4.047461032867432,
      "learning_rate": 1.8251777498954414e-05,
      "loss": 0.8447,
      "step": 210
    },
    {
      "epoch": 0.27603513174404015,
      "grad_norm": 2.8806040287017822,
      "learning_rate": 1.8168130489335008e-05,
      "loss": 0.8333,
      "step": 220
    },
    {
      "epoch": 0.28858218318695106,
      "grad_norm": 5.517472743988037,
      "learning_rate": 1.80844834797156e-05,
      "loss": 0.8697,
      "step": 230
    },
    {
      "epoch": 0.30112923462986196,
      "grad_norm": 7.646421432495117,
      "learning_rate": 1.8000836470096195e-05,
      "loss": 0.9159,
      "step": 240
    },
    {
      "epoch": 0.3136762860727729,
      "grad_norm": 4.211366176605225,
      "learning_rate": 1.791718946047679e-05,
      "loss": 0.8848,
      "step": 250
    },
    {
      "epoch": 0.32622333751568383,
      "grad_norm": 7.600694179534912,
      "learning_rate": 1.7833542450857385e-05,
      "loss": 0.8085,
      "step": 260
    },
    {
      "epoch": 0.33877038895859474,
      "grad_norm": 4.872680187225342,
      "learning_rate": 1.7749895441237976e-05,
      "loss": 0.8301,
      "step": 270
    },
    {
      "epoch": 0.35131744040150564,
      "grad_norm": 6.380251884460449,
      "learning_rate": 1.7666248431618572e-05,
      "loss": 0.9484,
      "step": 280
    },
    {
      "epoch": 0.36386449184441655,
      "grad_norm": 4.365920066833496,
      "learning_rate": 1.7582601421999166e-05,
      "loss": 0.9457,
      "step": 290
    },
    {
      "epoch": 0.37641154328732745,
      "grad_norm": 3.710908889770508,
      "learning_rate": 1.749895441237976e-05,
      "loss": 0.8773,
      "step": 300
    },
    {
      "epoch": 0.3889585947302384,
      "grad_norm": 4.279157638549805,
      "learning_rate": 1.7415307402760353e-05,
      "loss": 0.8892,
      "step": 310
    },
    {
      "epoch": 0.4015056461731493,
      "grad_norm": 17.142894744873047,
      "learning_rate": 1.7331660393140947e-05,
      "loss": 0.8723,
      "step": 320
    },
    {
      "epoch": 0.41405269761606023,
      "grad_norm": 3.349973440170288,
      "learning_rate": 1.724801338352154e-05,
      "loss": 0.8092,
      "step": 330
    },
    {
      "epoch": 0.42659974905897113,
      "grad_norm": 7.343029022216797,
      "learning_rate": 1.7164366373902134e-05,
      "loss": 0.9051,
      "step": 340
    },
    {
      "epoch": 0.43914680050188204,
      "grad_norm": 3.02512264251709,
      "learning_rate": 1.7080719364282727e-05,
      "loss": 0.8959,
      "step": 350
    },
    {
      "epoch": 0.451693851944793,
      "grad_norm": 13.072802543640137,
      "learning_rate": 1.699707235466332e-05,
      "loss": 0.9339,
      "step": 360
    },
    {
      "epoch": 0.4642409033877039,
      "grad_norm": 3.5314481258392334,
      "learning_rate": 1.6913425345043917e-05,
      "loss": 0.8883,
      "step": 370
    },
    {
      "epoch": 0.4767879548306148,
      "grad_norm": 3.5592713356018066,
      "learning_rate": 1.6829778335424508e-05,
      "loss": 0.9539,
      "step": 380
    },
    {
      "epoch": 0.4893350062735257,
      "grad_norm": 2.2764039039611816,
      "learning_rate": 1.6746131325805104e-05,
      "loss": 0.9251,
      "step": 390
    },
    {
      "epoch": 0.5018820577164367,
      "grad_norm": 2.953474998474121,
      "learning_rate": 1.6662484316185698e-05,
      "loss": 0.9561,
      "step": 400
    },
    {
      "epoch": 0.5144291091593476,
      "grad_norm": 2.250779628753662,
      "learning_rate": 1.657883730656629e-05,
      "loss": 0.9405,
      "step": 410
    },
    {
      "epoch": 0.5269761606022585,
      "grad_norm": 5.86421537399292,
      "learning_rate": 1.6495190296946885e-05,
      "loss": 0.8936,
      "step": 420
    },
    {
      "epoch": 0.5395232120451694,
      "grad_norm": 4.82138729095459,
      "learning_rate": 1.6411543287327482e-05,
      "loss": 0.8553,
      "step": 430
    },
    {
      "epoch": 0.5520702634880803,
      "grad_norm": 3.090364456176758,
      "learning_rate": 1.6327896277708072e-05,
      "loss": 0.9059,
      "step": 440
    },
    {
      "epoch": 0.5646173149309912,
      "grad_norm": 5.363993167877197,
      "learning_rate": 1.624424926808867e-05,
      "loss": 0.9631,
      "step": 450
    },
    {
      "epoch": 0.5771643663739021,
      "grad_norm": 3.4371540546417236,
      "learning_rate": 1.616060225846926e-05,
      "loss": 0.794,
      "step": 460
    },
    {
      "epoch": 0.589711417816813,
      "grad_norm": 4.080657482147217,
      "learning_rate": 1.6076955248849856e-05,
      "loss": 0.9348,
      "step": 470
    },
    {
      "epoch": 0.6022584692597239,
      "grad_norm": 5.351539134979248,
      "learning_rate": 1.599330823923045e-05,
      "loss": 0.9286,
      "step": 480
    },
    {
      "epoch": 0.6148055207026348,
      "grad_norm": 3.6495888233184814,
      "learning_rate": 1.5909661229611043e-05,
      "loss": 0.7525,
      "step": 490
    },
    {
      "epoch": 0.6273525721455459,
      "grad_norm": 4.849205017089844,
      "learning_rate": 1.5826014219991636e-05,
      "loss": 0.9534,
      "step": 500
    },
    {
      "epoch": 0.6398996235884568,
      "grad_norm": 4.923251152038574,
      "learning_rate": 1.574236721037223e-05,
      "loss": 0.9681,
      "step": 510
    },
    {
      "epoch": 0.6524466750313677,
      "grad_norm": 4.8032073974609375,
      "learning_rate": 1.5658720200752824e-05,
      "loss": 0.8674,
      "step": 520
    },
    {
      "epoch": 0.6649937264742786,
      "grad_norm": 6.583972454071045,
      "learning_rate": 1.5575073191133417e-05,
      "loss": 0.8463,
      "step": 530
    },
    {
      "epoch": 0.6775407779171895,
      "grad_norm": 6.276474475860596,
      "learning_rate": 1.5491426181514014e-05,
      "loss": 0.8481,
      "step": 540
    },
    {
      "epoch": 0.6900878293601004,
      "grad_norm": 6.980823040008545,
      "learning_rate": 1.5407779171894604e-05,
      "loss": 0.8441,
      "step": 550
    },
    {
      "epoch": 0.7026348808030113,
      "grad_norm": 5.919472694396973,
      "learning_rate": 1.53241321622752e-05,
      "loss": 0.7705,
      "step": 560
    },
    {
      "epoch": 0.7151819322459222,
      "grad_norm": 14.738715171813965,
      "learning_rate": 1.5240485152655793e-05,
      "loss": 0.8883,
      "step": 570
    },
    {
      "epoch": 0.7277289836888331,
      "grad_norm": 9.944400787353516,
      "learning_rate": 1.5156838143036388e-05,
      "loss": 0.843,
      "step": 580
    },
    {
      "epoch": 0.740276035131744,
      "grad_norm": 5.619649410247803,
      "learning_rate": 1.5073191133416981e-05,
      "loss": 0.9996,
      "step": 590
    },
    {
      "epoch": 0.7528230865746549,
      "grad_norm": 7.122910499572754,
      "learning_rate": 1.4989544123797575e-05,
      "loss": 0.9,
      "step": 600
    },
    {
      "epoch": 0.7653701380175659,
      "grad_norm": 10.766443252563477,
      "learning_rate": 1.490589711417817e-05,
      "loss": 0.8326,
      "step": 610
    },
    {
      "epoch": 0.7779171894604768,
      "grad_norm": 4.103264808654785,
      "learning_rate": 1.4822250104558764e-05,
      "loss": 0.8552,
      "step": 620
    },
    {
      "epoch": 0.7904642409033877,
      "grad_norm": 11.64675521850586,
      "learning_rate": 1.4738603094939357e-05,
      "loss": 0.774,
      "step": 630
    },
    {
      "epoch": 0.8030112923462986,
      "grad_norm": 6.035724639892578,
      "learning_rate": 1.465495608531995e-05,
      "loss": 0.9782,
      "step": 640
    },
    {
      "epoch": 0.8155583437892095,
      "grad_norm": 6.475778102874756,
      "learning_rate": 1.4571309075700546e-05,
      "loss": 0.7997,
      "step": 650
    },
    {
      "epoch": 0.8281053952321205,
      "grad_norm": 5.5077314376831055,
      "learning_rate": 1.4487662066081138e-05,
      "loss": 0.7985,
      "step": 660
    },
    {
      "epoch": 0.8406524466750314,
      "grad_norm": 9.788359642028809,
      "learning_rate": 1.4404015056461733e-05,
      "loss": 0.9237,
      "step": 670
    },
    {
      "epoch": 0.8531994981179423,
      "grad_norm": 5.642941951751709,
      "learning_rate": 1.4320368046842325e-05,
      "loss": 0.7795,
      "step": 680
    },
    {
      "epoch": 0.8657465495608532,
      "grad_norm": 7.9442315101623535,
      "learning_rate": 1.423672103722292e-05,
      "loss": 0.891,
      "step": 690
    },
    {
      "epoch": 0.8782936010037641,
      "grad_norm": 4.023198127746582,
      "learning_rate": 1.4153074027603515e-05,
      "loss": 0.8391,
      "step": 700
    },
    {
      "epoch": 0.890840652446675,
      "grad_norm": 11.08212661743164,
      "learning_rate": 1.4069427017984107e-05,
      "loss": 0.9049,
      "step": 710
    },
    {
      "epoch": 0.903387703889586,
      "grad_norm": 7.001065254211426,
      "learning_rate": 1.3985780008364702e-05,
      "loss": 0.9636,
      "step": 720
    },
    {
      "epoch": 0.9159347553324969,
      "grad_norm": 7.488030433654785,
      "learning_rate": 1.3902132998745297e-05,
      "loss": 0.8491,
      "step": 730
    },
    {
      "epoch": 0.9284818067754078,
      "grad_norm": 5.319072246551514,
      "learning_rate": 1.381848598912589e-05,
      "loss": 0.921,
      "step": 740
    },
    {
      "epoch": 0.9410288582183187,
      "grad_norm": 7.3197126388549805,
      "learning_rate": 1.3734838979506484e-05,
      "loss": 0.9866,
      "step": 750
    },
    {
      "epoch": 0.9535759096612296,
      "grad_norm": 5.280470848083496,
      "learning_rate": 1.3651191969887078e-05,
      "loss": 0.8146,
      "step": 760
    },
    {
      "epoch": 0.9661229611041405,
      "grad_norm": 3.304518938064575,
      "learning_rate": 1.3567544960267671e-05,
      "loss": 0.8724,
      "step": 770
    },
    {
      "epoch": 0.9786700125470514,
      "grad_norm": 3.306471109390259,
      "learning_rate": 1.3483897950648267e-05,
      "loss": 0.8796,
      "step": 780
    },
    {
      "epoch": 0.9912170639899623,
      "grad_norm": 5.108331203460693,
      "learning_rate": 1.3400250941028858e-05,
      "loss": 0.8931,
      "step": 790
    },
    {
      "epoch": 1.0037641154328734,
      "grad_norm": 9.46453857421875,
      "learning_rate": 1.3316603931409454e-05,
      "loss": 0.8834,
      "step": 800
    },
    {
      "epoch": 1.0163111668757843,
      "grad_norm": 8.54848861694336,
      "learning_rate": 1.3232956921790047e-05,
      "loss": 0.7073,
      "step": 810
    },
    {
      "epoch": 1.0288582183186952,
      "grad_norm": 8.689090728759766,
      "learning_rate": 1.314930991217064e-05,
      "loss": 0.8242,
      "step": 820
    },
    {
      "epoch": 1.041405269761606,
      "grad_norm": 8.000664710998535,
      "learning_rate": 1.3065662902551234e-05,
      "loss": 0.8314,
      "step": 830
    },
    {
      "epoch": 1.053952321204517,
      "grad_norm": 11.315167427062988,
      "learning_rate": 1.298201589293183e-05,
      "loss": 0.6794,
      "step": 840
    },
    {
      "epoch": 1.066499372647428,
      "grad_norm": 6.037744522094727,
      "learning_rate": 1.2898368883312423e-05,
      "loss": 0.7796,
      "step": 850
    },
    {
      "epoch": 1.0790464240903388,
      "grad_norm": 9.521272659301758,
      "learning_rate": 1.2814721873693016e-05,
      "loss": 0.92,
      "step": 860
    },
    {
      "epoch": 1.0915934755332497,
      "grad_norm": 18.728450775146484,
      "learning_rate": 1.2731074864073612e-05,
      "loss": 0.8371,
      "step": 870
    },
    {
      "epoch": 1.1041405269761606,
      "grad_norm": 7.143416404724121,
      "learning_rate": 1.2647427854454203e-05,
      "loss": 0.8534,
      "step": 880
    },
    {
      "epoch": 1.1166875784190715,
      "grad_norm": 7.581761837005615,
      "learning_rate": 1.2563780844834799e-05,
      "loss": 0.894,
      "step": 890
    },
    {
      "epoch": 1.1292346298619824,
      "grad_norm": 3.4014008045196533,
      "learning_rate": 1.248013383521539e-05,
      "loss": 0.8155,
      "step": 900
    },
    {
      "epoch": 1.1417816813048933,
      "grad_norm": 13.978635787963867,
      "learning_rate": 1.2396486825595986e-05,
      "loss": 0.754,
      "step": 910
    },
    {
      "epoch": 1.1543287327478042,
      "grad_norm": 5.18642520904541,
      "learning_rate": 1.2312839815976581e-05,
      "loss": 0.8493,
      "step": 920
    },
    {
      "epoch": 1.1668757841907151,
      "grad_norm": 4.153924942016602,
      "learning_rate": 1.2229192806357173e-05,
      "loss": 0.813,
      "step": 930
    },
    {
      "epoch": 1.179422835633626,
      "grad_norm": 9.386292457580566,
      "learning_rate": 1.2145545796737768e-05,
      "loss": 0.7526,
      "step": 940
    },
    {
      "epoch": 1.191969887076537,
      "grad_norm": 5.997525215148926,
      "learning_rate": 1.2061898787118363e-05,
      "loss": 0.8602,
      "step": 950
    },
    {
      "epoch": 1.2045169385194479,
      "grad_norm": 7.475282669067383,
      "learning_rate": 1.1978251777498955e-05,
      "loss": 0.6822,
      "step": 960
    },
    {
      "epoch": 1.2170639899623588,
      "grad_norm": 11.12500286102295,
      "learning_rate": 1.189460476787955e-05,
      "loss": 0.7505,
      "step": 970
    },
    {
      "epoch": 1.2296110414052697,
      "grad_norm": 10.952397346496582,
      "learning_rate": 1.1810957758260144e-05,
      "loss": 0.8621,
      "step": 980
    },
    {
      "epoch": 1.2421580928481806,
      "grad_norm": 16.098909378051758,
      "learning_rate": 1.1727310748640737e-05,
      "loss": 0.8263,
      "step": 990
    },
    {
      "epoch": 1.2547051442910915,
      "grad_norm": 7.630945682525635,
      "learning_rate": 1.164366373902133e-05,
      "loss": 0.7283,
      "step": 1000
    },
    {
      "epoch": 1.2672521957340024,
      "grad_norm": 5.213379859924316,
      "learning_rate": 1.1560016729401924e-05,
      "loss": 0.7849,
      "step": 1010
    },
    {
      "epoch": 1.2797992471769133,
      "grad_norm": 9.576950073242188,
      "learning_rate": 1.147636971978252e-05,
      "loss": 0.7845,
      "step": 1020
    },
    {
      "epoch": 1.2923462986198244,
      "grad_norm": 7.526945114135742,
      "learning_rate": 1.1392722710163113e-05,
      "loss": 0.8226,
      "step": 1030
    },
    {
      "epoch": 1.3048933500627353,
      "grad_norm": 8.773786544799805,
      "learning_rate": 1.1309075700543706e-05,
      "loss": 0.7912,
      "step": 1040
    },
    {
      "epoch": 1.3174404015056462,
      "grad_norm": 5.114284038543701,
      "learning_rate": 1.12254286909243e-05,
      "loss": 0.7414,
      "step": 1050
    },
    {
      "epoch": 1.3299874529485571,
      "grad_norm": 6.488368988037109,
      "learning_rate": 1.1141781681304895e-05,
      "loss": 0.8588,
      "step": 1060
    },
    {
      "epoch": 1.342534504391468,
      "grad_norm": 6.658433437347412,
      "learning_rate": 1.1058134671685487e-05,
      "loss": 0.9214,
      "step": 1070
    },
    {
      "epoch": 1.355081555834379,
      "grad_norm": 10.264144897460938,
      "learning_rate": 1.0974487662066082e-05,
      "loss": 0.7492,
      "step": 1080
    },
    {
      "epoch": 1.3676286072772899,
      "grad_norm": 7.86182165145874,
      "learning_rate": 1.0890840652446677e-05,
      "loss": 0.7919,
      "step": 1090
    },
    {
      "epoch": 1.3801756587202008,
      "grad_norm": 7.3965535163879395,
      "learning_rate": 1.080719364282727e-05,
      "loss": 0.8695,
      "step": 1100
    },
    {
      "epoch": 1.3927227101631117,
      "grad_norm": 9.31979751586914,
      "learning_rate": 1.0723546633207864e-05,
      "loss": 0.7495,
      "step": 1110
    },
    {
      "epoch": 1.4052697616060226,
      "grad_norm": 9.471373558044434,
      "learning_rate": 1.0639899623588456e-05,
      "loss": 0.8729,
      "step": 1120
    },
    {
      "epoch": 1.4178168130489335,
      "grad_norm": 13.156733512878418,
      "learning_rate": 1.0556252613969051e-05,
      "loss": 0.8688,
      "step": 1130
    },
    {
      "epoch": 1.4303638644918444,
      "grad_norm": 13.249898910522461,
      "learning_rate": 1.0472605604349647e-05,
      "loss": 0.8097,
      "step": 1140
    },
    {
      "epoch": 1.4429109159347553,
      "grad_norm": 6.98345947265625,
      "learning_rate": 1.0388958594730238e-05,
      "loss": 0.7635,
      "step": 1150
    },
    {
      "epoch": 1.4554579673776662,
      "grad_norm": 18.78076171875,
      "learning_rate": 1.0305311585110834e-05,
      "loss": 0.957,
      "step": 1160
    },
    {
      "epoch": 1.468005018820577,
      "grad_norm": 25.165569305419922,
      "learning_rate": 1.0221664575491427e-05,
      "loss": 0.7616,
      "step": 1170
    },
    {
      "epoch": 1.480552070263488,
      "grad_norm": 8.38501262664795,
      "learning_rate": 1.013801756587202e-05,
      "loss": 0.7768,
      "step": 1180
    },
    {
      "epoch": 1.4930991217063991,
      "grad_norm": 10.194948196411133,
      "learning_rate": 1.0054370556252616e-05,
      "loss": 0.8924,
      "step": 1190
    },
    {
      "epoch": 1.50564617314931,
      "grad_norm": 10.704676628112793,
      "learning_rate": 9.97072354663321e-06,
      "loss": 0.9066,
      "step": 1200
    },
    {
      "epoch": 1.518193224592221,
      "grad_norm": 21.507707595825195,
      "learning_rate": 9.887076537013803e-06,
      "loss": 0.9013,
      "step": 1210
    },
    {
      "epoch": 1.5307402760351319,
      "grad_norm": 8.45915699005127,
      "learning_rate": 9.803429527394396e-06,
      "loss": 1.0474,
      "step": 1220
    },
    {
      "epoch": 1.5432873274780428,
      "grad_norm": 5.320367336273193,
      "learning_rate": 9.71978251777499e-06,
      "loss": 0.8313,
      "step": 1230
    },
    {
      "epoch": 1.5558343789209537,
      "grad_norm": 10.026762962341309,
      "learning_rate": 9.636135508155583e-06,
      "loss": 0.8881,
      "step": 1240
    },
    {
      "epoch": 1.5683814303638646,
      "grad_norm": 9.318647384643555,
      "learning_rate": 9.552488498536177e-06,
      "loss": 0.8274,
      "step": 1250
    },
    {
      "epoch": 1.5809284818067755,
      "grad_norm": 9.438924789428711,
      "learning_rate": 9.468841488916772e-06,
      "loss": 0.9072,
      "step": 1260
    },
    {
      "epoch": 1.5934755332496864,
      "grad_norm": 11.8627290725708,
      "learning_rate": 9.385194479297366e-06,
      "loss": 0.746,
      "step": 1270
    },
    {
      "epoch": 1.6060225846925973,
      "grad_norm": 5.647990703582764,
      "learning_rate": 9.30154746967796e-06,
      "loss": 0.8526,
      "step": 1280
    },
    {
      "epoch": 1.6185696361355082,
      "grad_norm": 17.41633415222168,
      "learning_rate": 9.217900460058554e-06,
      "loss": 0.7216,
      "step": 1290
    },
    {
      "epoch": 1.631116687578419,
      "grad_norm": 10.73770523071289,
      "learning_rate": 9.134253450439148e-06,
      "loss": 0.7405,
      "step": 1300
    },
    {
      "epoch": 1.64366373902133,
      "grad_norm": 9.316720962524414,
      "learning_rate": 9.050606440819741e-06,
      "loss": 0.727,
      "step": 1310
    },
    {
      "epoch": 1.656210790464241,
      "grad_norm": 8.067049980163574,
      "learning_rate": 8.966959431200335e-06,
      "loss": 0.7992,
      "step": 1320
    },
    {
      "epoch": 1.6687578419071518,
      "grad_norm": 23.564279556274414,
      "learning_rate": 8.88331242158093e-06,
      "loss": 0.7617,
      "step": 1330
    },
    {
      "epoch": 1.6813048933500627,
      "grad_norm": 10.805792808532715,
      "learning_rate": 8.799665411961524e-06,
      "loss": 0.9413,
      "step": 1340
    },
    {
      "epoch": 1.6938519447929736,
      "grad_norm": 8.173852920532227,
      "learning_rate": 8.716018402342117e-06,
      "loss": 0.8204,
      "step": 1350
    },
    {
      "epoch": 1.7063989962358845,
      "grad_norm": 16.444934844970703,
      "learning_rate": 8.63237139272271e-06,
      "loss": 0.7124,
      "step": 1360
    },
    {
      "epoch": 1.7189460476787954,
      "grad_norm": 19.09043312072754,
      "learning_rate": 8.548724383103306e-06,
      "loss": 0.8573,
      "step": 1370
    },
    {
      "epoch": 1.7314930991217063,
      "grad_norm": 15.454293251037598,
      "learning_rate": 8.4650773734839e-06,
      "loss": 0.7964,
      "step": 1380
    },
    {
      "epoch": 1.7440401505646173,
      "grad_norm": 9.219696044921875,
      "learning_rate": 8.381430363864493e-06,
      "loss": 0.7494,
      "step": 1390
    },
    {
      "epoch": 1.7565872020075282,
      "grad_norm": 12.434812545776367,
      "learning_rate": 8.297783354245086e-06,
      "loss": 0.7066,
      "step": 1400
    },
    {
      "epoch": 1.769134253450439,
      "grad_norm": 8.64736270904541,
      "learning_rate": 8.21413634462568e-06,
      "loss": 0.8049,
      "step": 1410
    },
    {
      "epoch": 1.78168130489335,
      "grad_norm": 8.343708038330078,
      "learning_rate": 8.130489335006273e-06,
      "loss": 0.7935,
      "step": 1420
    },
    {
      "epoch": 1.7942283563362609,
      "grad_norm": 6.48344612121582,
      "learning_rate": 8.046842325386867e-06,
      "loss": 0.7431,
      "step": 1430
    },
    {
      "epoch": 1.8067754077791718,
      "grad_norm": 7.362094402313232,
      "learning_rate": 7.963195315767462e-06,
      "loss": 0.6778,
      "step": 1440
    },
    {
      "epoch": 1.8193224592220827,
      "grad_norm": 10.41412353515625,
      "learning_rate": 7.879548306148056e-06,
      "loss": 0.8869,
      "step": 1450
    },
    {
      "epoch": 1.8318695106649936,
      "grad_norm": 11.075960159301758,
      "learning_rate": 7.79590129652865e-06,
      "loss": 0.8913,
      "step": 1460
    },
    {
      "epoch": 1.8444165621079045,
      "grad_norm": 3.257545232772827,
      "learning_rate": 7.712254286909243e-06,
      "loss": 0.8214,
      "step": 1470
    },
    {
      "epoch": 1.8569636135508154,
      "grad_norm": 2.363823175430298,
      "learning_rate": 7.628607277289838e-06,
      "loss": 0.8268,
      "step": 1480
    },
    {
      "epoch": 1.8695106649937263,
      "grad_norm": 16.772573471069336,
      "learning_rate": 7.544960267670431e-06,
      "loss": 0.9585,
      "step": 1490
    },
    {
      "epoch": 1.8820577164366374,
      "grad_norm": 23.401941299438477,
      "learning_rate": 7.461313258051025e-06,
      "loss": 0.8636,
      "step": 1500
    },
    {
      "epoch": 1.8946047678795483,
      "grad_norm": 8.009302139282227,
      "learning_rate": 7.37766624843162e-06,
      "loss": 0.6728,
      "step": 1510
    },
    {
      "epoch": 1.9071518193224593,
      "grad_norm": 19.989112854003906,
      "learning_rate": 7.294019238812214e-06,
      "loss": 0.8526,
      "step": 1520
    },
    {
      "epoch": 1.9196988707653702,
      "grad_norm": 11.849247932434082,
      "learning_rate": 7.210372229192807e-06,
      "loss": 0.8825,
      "step": 1530
    },
    {
      "epoch": 1.932245922208281,
      "grad_norm": 8.389686584472656,
      "learning_rate": 7.126725219573401e-06,
      "loss": 0.8261,
      "step": 1540
    },
    {
      "epoch": 1.944792973651192,
      "grad_norm": 11.961833000183105,
      "learning_rate": 7.043078209953995e-06,
      "loss": 0.6564,
      "step": 1550
    },
    {
      "epoch": 1.9573400250941029,
      "grad_norm": 15.514620780944824,
      "learning_rate": 6.9594312003345885e-06,
      "loss": 0.7943,
      "step": 1560
    },
    {
      "epoch": 1.9698870765370138,
      "grad_norm": 10.584662437438965,
      "learning_rate": 6.875784190715182e-06,
      "loss": 0.7593,
      "step": 1570
    },
    {
      "epoch": 1.9824341279799247,
      "grad_norm": 15.600502014160156,
      "learning_rate": 6.7921371810957755e-06,
      "loss": 0.8643,
      "step": 1580
    },
    {
      "epoch": 1.9949811794228356,
      "grad_norm": 11.883516311645508,
      "learning_rate": 6.708490171476371e-06,
      "loss": 0.8381,
      "step": 1590
    },
    {
      "epoch": 2.0075282308657467,
      "grad_norm": 6.613313674926758,
      "learning_rate": 6.624843161856964e-06,
      "loss": 0.8074,
      "step": 1600
    },
    {
      "epoch": 2.0200752823086576,
      "grad_norm": 10.316112518310547,
      "learning_rate": 6.541196152237558e-06,
      "loss": 0.7161,
      "step": 1610
    },
    {
      "epoch": 2.0326223337515685,
      "grad_norm": 34.29435348510742,
      "learning_rate": 6.457549142618152e-06,
      "loss": 0.7553,
      "step": 1620
    },
    {
      "epoch": 2.0451693851944794,
      "grad_norm": 13.848133087158203,
      "learning_rate": 6.373902132998746e-06,
      "loss": 0.7486,
      "step": 1630
    },
    {
      "epoch": 2.0577164366373903,
      "grad_norm": 6.787230014801025,
      "learning_rate": 6.29025512337934e-06,
      "loss": 0.7867,
      "step": 1640
    },
    {
      "epoch": 2.0702634880803013,
      "grad_norm": 12.09325122833252,
      "learning_rate": 6.2066081137599335e-06,
      "loss": 0.7999,
      "step": 1650
    },
    {
      "epoch": 2.082810539523212,
      "grad_norm": 9.453817367553711,
      "learning_rate": 6.122961104140528e-06,
      "loss": 0.7668,
      "step": 1660
    },
    {
      "epoch": 2.095357590966123,
      "grad_norm": 10.155051231384277,
      "learning_rate": 6.039314094521121e-06,
      "loss": 0.7595,
      "step": 1670
    },
    {
      "epoch": 2.107904642409034,
      "grad_norm": 26.19032859802246,
      "learning_rate": 5.955667084901715e-06,
      "loss": 0.9065,
      "step": 1680
    },
    {
      "epoch": 2.120451693851945,
      "grad_norm": 9.538314819335938,
      "learning_rate": 5.872020075282308e-06,
      "loss": 0.7759,
      "step": 1690
    },
    {
      "epoch": 2.132998745294856,
      "grad_norm": 6.306014060974121,
      "learning_rate": 5.788373065662904e-06,
      "loss": 0.7431,
      "step": 1700
    },
    {
      "epoch": 2.1455457967377667,
      "grad_norm": 8.156487464904785,
      "learning_rate": 5.704726056043497e-06,
      "loss": 0.7029,
      "step": 1710
    },
    {
      "epoch": 2.1580928481806776,
      "grad_norm": 14.053692817687988,
      "learning_rate": 5.621079046424091e-06,
      "loss": 0.8357,
      "step": 1720
    },
    {
      "epoch": 2.1706398996235885,
      "grad_norm": 10.932027816772461,
      "learning_rate": 5.537432036804685e-06,
      "loss": 0.723,
      "step": 1730
    },
    {
      "epoch": 2.1831869510664994,
      "grad_norm": 23.12853240966797,
      "learning_rate": 5.4537850271852785e-06,
      "loss": 0.819,
      "step": 1740
    },
    {
      "epoch": 2.1957340025094103,
      "grad_norm": 10.468832015991211,
      "learning_rate": 5.370138017565872e-06,
      "loss": 0.7771,
      "step": 1750
    },
    {
      "epoch": 2.208281053952321,
      "grad_norm": 16.84822654724121,
      "learning_rate": 5.286491007946466e-06,
      "loss": 0.7895,
      "step": 1760
    },
    {
      "epoch": 2.220828105395232,
      "grad_norm": 17.586076736450195,
      "learning_rate": 5.202843998327061e-06,
      "loss": 0.8417,
      "step": 1770
    },
    {
      "epoch": 2.233375156838143,
      "grad_norm": 5.2970662117004395,
      "learning_rate": 5.119196988707654e-06,
      "loss": 0.7451,
      "step": 1780
    },
    {
      "epoch": 2.245922208281054,
      "grad_norm": 11.92138385772705,
      "learning_rate": 5.035549979088248e-06,
      "loss": 0.8368,
      "step": 1790
    },
    {
      "epoch": 2.258469259723965,
      "grad_norm": 22.60080909729004,
      "learning_rate": 4.951902969468842e-06,
      "loss": 0.7133,
      "step": 1800
    },
    {
      "epoch": 2.2710163111668757,
      "grad_norm": 12.277490615844727,
      "learning_rate": 4.868255959849436e-06,
      "loss": 0.8418,
      "step": 1810
    },
    {
      "epoch": 2.2835633626097867,
      "grad_norm": 6.862631320953369,
      "learning_rate": 4.78460895023003e-06,
      "loss": 0.7564,
      "step": 1820
    },
    {
      "epoch": 2.2961104140526976,
      "grad_norm": 7.9091267585754395,
      "learning_rate": 4.7009619406106235e-06,
      "loss": 0.7582,
      "step": 1830
    },
    {
      "epoch": 2.3086574654956085,
      "grad_norm": 8.295835494995117,
      "learning_rate": 4.617314930991217e-06,
      "loss": 0.8465,
      "step": 1840
    },
    {
      "epoch": 2.3212045169385194,
      "grad_norm": 11.34477710723877,
      "learning_rate": 4.533667921371811e-06,
      "loss": 0.7846,
      "step": 1850
    },
    {
      "epoch": 2.3337515683814303,
      "grad_norm": 32.95912170410156,
      "learning_rate": 4.450020911752405e-06,
      "loss": 0.6656,
      "step": 1860
    },
    {
      "epoch": 2.346298619824341,
      "grad_norm": 17.355579376220703,
      "learning_rate": 4.366373902132999e-06,
      "loss": 0.8187,
      "step": 1870
    },
    {
      "epoch": 2.358845671267252,
      "grad_norm": 8.741580963134766,
      "learning_rate": 4.282726892513593e-06,
      "loss": 0.744,
      "step": 1880
    },
    {
      "epoch": 2.371392722710163,
      "grad_norm": 12.800196647644043,
      "learning_rate": 4.199079882894187e-06,
      "loss": 0.6891,
      "step": 1890
    },
    {
      "epoch": 2.383939774153074,
      "grad_norm": 6.387624740600586,
      "learning_rate": 4.115432873274781e-06,
      "loss": 0.6968,
      "step": 1900
    },
    {
      "epoch": 2.396486825595985,
      "grad_norm": 9.609091758728027,
      "learning_rate": 4.031785863655375e-06,
      "loss": 0.7412,
      "step": 1910
    },
    {
      "epoch": 2.4090338770388957,
      "grad_norm": 16.369497299194336,
      "learning_rate": 3.9481388540359685e-06,
      "loss": 0.7136,
      "step": 1920
    },
    {
      "epoch": 2.4215809284818066,
      "grad_norm": 7.245341777801514,
      "learning_rate": 3.864491844416563e-06,
      "loss": 0.6953,
      "step": 1930
    },
    {
      "epoch": 2.4341279799247175,
      "grad_norm": 21.487659454345703,
      "learning_rate": 3.7808448347971568e-06,
      "loss": 0.776,
      "step": 1940
    },
    {
      "epoch": 2.4466750313676284,
      "grad_norm": 13.91763973236084,
      "learning_rate": 3.6971978251777503e-06,
      "loss": 0.9644,
      "step": 1950
    },
    {
      "epoch": 2.4592220828105393,
      "grad_norm": 16.034120559692383,
      "learning_rate": 3.6135508155583442e-06,
      "loss": 0.7451,
      "step": 1960
    },
    {
      "epoch": 2.4717691342534502,
      "grad_norm": 15.478806495666504,
      "learning_rate": 3.5299038059389377e-06,
      "loss": 0.6669,
      "step": 1970
    },
    {
      "epoch": 2.484316185696361,
      "grad_norm": 14.39084529876709,
      "learning_rate": 3.446256796319532e-06,
      "loss": 0.7545,
      "step": 1980
    },
    {
      "epoch": 2.496863237139272,
      "grad_norm": 27.011598587036133,
      "learning_rate": 3.3626097867001256e-06,
      "loss": 0.8599,
      "step": 1990
    },
    {
      "epoch": 2.509410288582183,
      "grad_norm": 15.559931755065918,
      "learning_rate": 3.27896277708072e-06,
      "loss": 0.8266,
      "step": 2000
    },
    {
      "epoch": 2.521957340025094,
      "grad_norm": 8.093729019165039,
      "learning_rate": 3.1953157674613135e-06,
      "loss": 0.7978,
      "step": 2010
    },
    {
      "epoch": 2.5345043914680048,
      "grad_norm": 13.65913200378418,
      "learning_rate": 3.1116687578419074e-06,
      "loss": 0.8145,
      "step": 2020
    },
    {
      "epoch": 2.5470514429109157,
      "grad_norm": 10.85305404663086,
      "learning_rate": 3.028021748222501e-06,
      "loss": 0.7976,
      "step": 2030
    },
    {
      "epoch": 2.5595984943538266,
      "grad_norm": 21.572189331054688,
      "learning_rate": 2.9443747386030953e-06,
      "loss": 0.7435,
      "step": 2040
    },
    {
      "epoch": 2.572145545796738,
      "grad_norm": 14.962433815002441,
      "learning_rate": 2.860727728983689e-06,
      "loss": 0.7856,
      "step": 2050
    },
    {
      "epoch": 2.584692597239649,
      "grad_norm": 8.81922435760498,
      "learning_rate": 2.7770807193642827e-06,
      "loss": 0.6729,
      "step": 2060
    },
    {
      "epoch": 2.5972396486825597,
      "grad_norm": 36.92323684692383,
      "learning_rate": 2.693433709744877e-06,
"loss": 0.7374, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 2.6097867001254706, | |
"grad_norm": 19.693674087524414, | |
"learning_rate": 2.6097867001254706e-06, | |
"loss": 0.6577, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 2.6223337515683816, | |
"grad_norm": 6.363992691040039, | |
"learning_rate": 2.526139690506065e-06, | |
"loss": 0.7902, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 2.6348808030112925, | |
"grad_norm": 18.565412521362305, | |
"learning_rate": 2.4424926808866585e-06, | |
"loss": 0.638, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 2.6474278544542034, | |
"grad_norm": 23.991640090942383, | |
"learning_rate": 2.3588456712672524e-06, | |
"loss": 0.8057, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 2.6599749058971143, | |
"grad_norm": 11.251315116882324, | |
"learning_rate": 2.2751986616478463e-06, | |
"loss": 0.7766, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 2.672521957340025, | |
"grad_norm": 13.534173011779785, | |
"learning_rate": 2.1915516520284403e-06, | |
"loss": 0.7792, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 2.685069008782936, | |
"grad_norm": 28.147571563720703, | |
"learning_rate": 2.107904642409034e-06, | |
"loss": 0.7725, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 2.697616060225847, | |
"grad_norm": 25.395660400390625, | |
"learning_rate": 2.024257632789628e-06, | |
"loss": 0.7114, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 2.710163111668758, | |
"grad_norm": 11.658987998962402, | |
"learning_rate": 1.9406106231702216e-06, | |
"loss": 0.7515, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 2.722710163111669, | |
"grad_norm": 7.770451068878174, | |
"learning_rate": 1.8569636135508156e-06, | |
"loss": 0.7042, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 2.7352572145545797, | |
"grad_norm": 15.549407005310059, | |
"learning_rate": 1.7733166039314095e-06, | |
"loss": 0.8029, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 2.7478042659974906, | |
"grad_norm": 9.49413013458252, | |
"learning_rate": 1.6896695943120034e-06, | |
"loss": 0.642, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 2.7603513174404015, | |
"grad_norm": 4.831557750701904, | |
"learning_rate": 1.6060225846925972e-06, | |
"loss": 0.6564, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 2.7728983688833124, | |
"grad_norm": 12.240572929382324, | |
"learning_rate": 1.5223755750731913e-06, | |
"loss": 0.7423, | |
"step": 2210 | |
}, | |
{ | |
"epoch": 2.7854454203262233, | |
"grad_norm": 5.629863739013672, | |
"learning_rate": 1.4387285654537853e-06, | |
"loss": 0.8664, | |
"step": 2220 | |
}, | |
{ | |
"epoch": 2.7979924717691342, | |
"grad_norm": 14.666424751281738, | |
"learning_rate": 1.3550815558343792e-06, | |
"loss": 0.8401, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 2.810539523212045, | |
"grad_norm": 21.368179321289062, | |
"learning_rate": 1.271434546214973e-06, | |
"loss": 0.8129, | |
"step": 2240 | |
}, | |
{ | |
"epoch": 2.823086574654956, | |
"grad_norm": 16.279918670654297, | |
"learning_rate": 1.1877875365955668e-06, | |
"loss": 0.676, | |
"step": 2250 | |
}, | |
{ | |
"epoch": 2.835633626097867, | |
"grad_norm": 9.378284454345703, | |
"learning_rate": 1.1041405269761608e-06, | |
"loss": 0.6352, | |
"step": 2260 | |
}, | |
{ | |
"epoch": 2.848180677540778, | |
"grad_norm": 21.204431533813477, | |
"learning_rate": 1.0204935173567545e-06, | |
"loss": 0.7627, | |
"step": 2270 | |
}, | |
{ | |
"epoch": 2.8607277289836888, | |
"grad_norm": 23.616268157958984, | |
"learning_rate": 9.368465077373484e-07, | |
"loss": 0.6356, | |
"step": 2280 | |
}, | |
{ | |
"epoch": 2.8732747804265997, | |
"grad_norm": 25.798891067504883, | |
"learning_rate": 8.531994981179424e-07, | |
"loss": 0.7385, | |
"step": 2290 | |
}, | |
{ | |
"epoch": 2.8858218318695106, | |
"grad_norm": 11.757165908813477, | |
"learning_rate": 7.695524884985363e-07, | |
"loss": 0.6858, | |
"step": 2300 | |
}, | |
{ | |
"epoch": 2.8983688833124215, | |
"grad_norm": 9.811064720153809, | |
"learning_rate": 6.859054788791301e-07, | |
"loss": 0.6791, | |
"step": 2310 | |
}, | |
{ | |
"epoch": 2.9109159347553324, | |
"grad_norm": 14.021916389465332, | |
"learning_rate": 6.02258469259724e-07, | |
"loss": 0.8106, | |
"step": 2320 | |
}, | |
{ | |
"epoch": 2.9234629861982433, | |
"grad_norm": 16.58710479736328, | |
"learning_rate": 5.186114596403179e-07, | |
"loss": 0.8049, | |
"step": 2330 | |
}, | |
{ | |
"epoch": 2.936010037641154, | |
"grad_norm": 8.84688663482666, | |
"learning_rate": 4.349644500209118e-07, | |
"loss": 0.6731, | |
"step": 2340 | |
}, | |
{ | |
"epoch": 2.948557089084065, | |
"grad_norm": 9.54117488861084, | |
"learning_rate": 3.5131744040150566e-07, | |
"loss": 0.7293, | |
"step": 2350 | |
}, | |
{ | |
"epoch": 2.961104140526976, | |
"grad_norm": 17.779142379760742, | |
"learning_rate": 2.6767043078209955e-07, | |
"loss": 0.7803, | |
"step": 2360 | |
}, | |
{ | |
"epoch": 2.973651191969887, | |
"grad_norm": 7.330904006958008, | |
"learning_rate": 1.8402342116269345e-07, | |
"loss": 0.7281, | |
"step": 2370 | |
}, | |
{ | |
"epoch": 2.9861982434127983, | |
"grad_norm": 31.703744888305664, | |
"learning_rate": 1.0037641154328732e-07, | |
"loss": 0.8839, | |
"step": 2380 | |
}, | |
{ | |
"epoch": 2.998745294855709, | |
"grad_norm": 13.579190254211426, | |
"learning_rate": 1.672940192388122e-08, | |
"loss": 0.8382, | |
"step": 2390 | |
} | |
], | |
"logging_steps": 10, | |
"max_steps": 2391, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 3, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": true | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 274122632540160.0, | |
"train_batch_size": 8, | |
"trial_name": null, | |
"trial_params": null | |
} | |