{ "best_metric": 0.3083859980106354, "best_model_checkpoint": "./w2v-bert-2.0-chichewa_34_34h/checkpoint-9000", "epoch": 25.831024930747922, "eval_steps": 1000, "global_step": 14000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0018467220683287165, "grad_norm": 22.621971130371094, "learning_rate": 3.0000000000000004e-09, "loss": 9.3625, "step": 1 }, { "epoch": 0.003693444136657433, "grad_norm": 20.207006454467773, "learning_rate": 6.000000000000001e-09, "loss": 8.455, "step": 2 }, { "epoch": 0.00554016620498615, "grad_norm": 19.7971134185791, "learning_rate": 9e-09, "loss": 8.2344, "step": 3 }, { "epoch": 0.007386888273314866, "grad_norm": 19.891284942626953, "learning_rate": 1.2000000000000002e-08, "loss": 8.3048, "step": 4 }, { "epoch": 0.009233610341643583, "grad_norm": 19.400951385498047, "learning_rate": 1.5000000000000002e-08, "loss": 8.0988, "step": 5 }, { "epoch": 0.0110803324099723, "grad_norm": 20.50615882873535, "learning_rate": 1.8e-08, "loss": 8.3828, "step": 6 }, { "epoch": 0.012927054478301015, "grad_norm": 20.289794921875, "learning_rate": 2.1e-08, "loss": 8.2395, "step": 7 }, { "epoch": 0.014773776546629732, "grad_norm": 20.422374725341797, "learning_rate": 2.4000000000000003e-08, "loss": 8.3791, "step": 8 }, { "epoch": 0.01662049861495845, "grad_norm": 20.068317413330078, "learning_rate": 2.7e-08, "loss": 8.223, "step": 9 }, { "epoch": 0.018467220683287166, "grad_norm": 20.406330108642578, "learning_rate": 3.0000000000000004e-08, "loss": 8.2373, "step": 10 }, { "epoch": 0.020313942751615882, "grad_norm": 20.23192024230957, "learning_rate": 3.3000000000000004e-08, "loss": 8.1534, "step": 11 }, { "epoch": 0.0221606648199446, "grad_norm": 20.09835433959961, "learning_rate": 3.6e-08, "loss": 8.0391, "step": 12 }, { "epoch": 0.024007386888273315, "grad_norm": 21.759641647338867, "learning_rate": 3.9e-08, "loss": 8.7457, "step": 13 }, { "epoch": 0.02585410895660203, 
"grad_norm": 21.865198135375977, "learning_rate": 4.2e-08, "loss": 8.8412, "step": 14 }, { "epoch": 0.027700831024930747, "grad_norm": 22.47687530517578, "learning_rate": 4.5e-08, "loss": 8.9815, "step": 15 }, { "epoch": 0.029547553093259463, "grad_norm": 24.19575309753418, "learning_rate": 4.8000000000000006e-08, "loss": 9.5453, "step": 16 }, { "epoch": 0.03139427516158818, "grad_norm": 23.769922256469727, "learning_rate": 5.1e-08, "loss": 9.4247, "step": 17 }, { "epoch": 0.0332409972299169, "grad_norm": 26.34709358215332, "learning_rate": 5.4e-08, "loss": 10.171, "step": 18 }, { "epoch": 0.03508771929824561, "grad_norm": 27.064607620239258, "learning_rate": 5.7e-08, "loss": 10.2378, "step": 19 }, { "epoch": 0.03693444136657433, "grad_norm": 25.482282638549805, "learning_rate": 6.000000000000001e-08, "loss": 9.9684, "step": 20 }, { "epoch": 0.038781163434903045, "grad_norm": 26.783565521240234, "learning_rate": 6.3e-08, "loss": 10.2665, "step": 21 }, { "epoch": 0.040627885503231764, "grad_norm": 25.946687698364258, "learning_rate": 6.600000000000001e-08, "loss": 9.8199, "step": 22 }, { "epoch": 0.04247460757156048, "grad_norm": 26.15570831298828, "learning_rate": 6.9e-08, "loss": 10.0497, "step": 23 }, { "epoch": 0.0443213296398892, "grad_norm": 26.88849639892578, "learning_rate": 7.2e-08, "loss": 10.1436, "step": 24 }, { "epoch": 0.046168051708217916, "grad_norm": 25.654088973999023, "learning_rate": 7.500000000000001e-08, "loss": 9.8762, "step": 25 }, { "epoch": 0.04801477377654663, "grad_norm": 26.350845336914062, "learning_rate": 7.8e-08, "loss": 9.9597, "step": 26 }, { "epoch": 0.04986149584487535, "grad_norm": 26.18523597717285, "learning_rate": 8.100000000000001e-08, "loss": 9.9127, "step": 27 }, { "epoch": 0.05170821791320406, "grad_norm": 25.764511108398438, "learning_rate": 8.4e-08, "loss": 9.9067, "step": 28 }, { "epoch": 0.05355493998153278, "grad_norm": 26.577726364135742, "learning_rate": 8.7e-08, "loss": 9.9984, "step": 29 }, { "epoch": 
0.055401662049861494, "grad_norm": 26.31147575378418, "learning_rate": 9e-08, "loss": 9.9988, "step": 30 }, { "epoch": 0.057248384118190214, "grad_norm": 26.770029067993164, "learning_rate": 9.3e-08, "loss": 10.0926, "step": 31 }, { "epoch": 0.05909510618651893, "grad_norm": 26.448558807373047, "learning_rate": 9.600000000000001e-08, "loss": 9.9067, "step": 32 }, { "epoch": 0.060941828254847646, "grad_norm": 26.373918533325195, "learning_rate": 9.9e-08, "loss": 9.9779, "step": 33 }, { "epoch": 0.06278855032317636, "grad_norm": 27.763418197631836, "learning_rate": 1.02e-07, "loss": 10.1244, "step": 34 }, { "epoch": 0.06463527239150507, "grad_norm": 25.96488380432129, "learning_rate": 1.05e-07, "loss": 9.7475, "step": 35 }, { "epoch": 0.0664819944598338, "grad_norm": 25.816864013671875, "learning_rate": 1.08e-07, "loss": 9.7923, "step": 36 }, { "epoch": 0.06832871652816251, "grad_norm": 26.328279495239258, "learning_rate": 1.11e-07, "loss": 9.8985, "step": 37 }, { "epoch": 0.07017543859649122, "grad_norm": 25.940914154052734, "learning_rate": 1.14e-07, "loss": 9.7801, "step": 38 }, { "epoch": 0.07202216066481995, "grad_norm": 25.886962890625, "learning_rate": 1.17e-07, "loss": 9.7807, "step": 39 }, { "epoch": 0.07386888273314866, "grad_norm": 27.217947006225586, "learning_rate": 1.2000000000000002e-07, "loss": 9.8574, "step": 40 }, { "epoch": 0.07571560480147738, "grad_norm": 26.309337615966797, "learning_rate": 1.23e-07, "loss": 9.5134, "step": 41 }, { "epoch": 0.07756232686980609, "grad_norm": 25.59661102294922, "learning_rate": 1.26e-07, "loss": 9.513, "step": 42 }, { "epoch": 0.07940904893813482, "grad_norm": 27.14220428466797, "learning_rate": 1.29e-07, "loss": 9.6874, "step": 43 }, { "epoch": 0.08125577100646353, "grad_norm": 24.923891067504883, "learning_rate": 1.3200000000000002e-07, "loss": 9.3105, "step": 44 }, { "epoch": 0.08310249307479224, "grad_norm": 25.352060317993164, "learning_rate": 1.35e-07, "loss": 9.3556, "step": 45 }, { "epoch": 
0.08494921514312095, "grad_norm": 25.561046600341797, "learning_rate": 1.38e-07, "loss": 9.2043, "step": 46 }, { "epoch": 0.08679593721144968, "grad_norm": 25.8244686126709, "learning_rate": 1.41e-07, "loss": 9.4057, "step": 47 }, { "epoch": 0.0886426592797784, "grad_norm": 26.3791561126709, "learning_rate": 1.44e-07, "loss": 9.2032, "step": 48 }, { "epoch": 0.0904893813481071, "grad_norm": 25.30979347229004, "learning_rate": 1.47e-07, "loss": 9.1291, "step": 49 }, { "epoch": 0.09233610341643583, "grad_norm": 23.93961524963379, "learning_rate": 1.5000000000000002e-07, "loss": 8.591, "step": 50 }, { "epoch": 0.09418282548476455, "grad_norm": 23.533157348632812, "learning_rate": 1.53e-07, "loss": 9.1986, "step": 51 }, { "epoch": 0.09602954755309326, "grad_norm": 19.902755737304688, "learning_rate": 1.56e-07, "loss": 7.9289, "step": 52 }, { "epoch": 0.09787626962142197, "grad_norm": 21.54668426513672, "learning_rate": 1.59e-07, "loss": 8.4068, "step": 53 }, { "epoch": 0.0997229916897507, "grad_norm": 20.574657440185547, "learning_rate": 1.6200000000000002e-07, "loss": 8.0642, "step": 54 }, { "epoch": 0.10156971375807941, "grad_norm": 20.305767059326172, "learning_rate": 1.6499999999999998e-07, "loss": 7.8933, "step": 55 }, { "epoch": 0.10341643582640812, "grad_norm": 21.524248123168945, "learning_rate": 1.68e-07, "loss": 8.2149, "step": 56 }, { "epoch": 0.10526315789473684, "grad_norm": 20.279159545898438, "learning_rate": 1.71e-07, "loss": 7.8225, "step": 57 }, { "epoch": 0.10710987996306556, "grad_norm": 20.45210838317871, "learning_rate": 1.74e-07, "loss": 7.8799, "step": 58 }, { "epoch": 0.10895660203139428, "grad_norm": 21.43587303161621, "learning_rate": 1.77e-07, "loss": 7.9268, "step": 59 }, { "epoch": 0.11080332409972299, "grad_norm": 20.783554077148438, "learning_rate": 1.8e-07, "loss": 7.9966, "step": 60 }, { "epoch": 0.11265004616805172, "grad_norm": 20.195615768432617, "learning_rate": 1.83e-07, "loss": 7.7444, "step": 61 }, { "epoch": 
0.11449676823638043, "grad_norm": 22.26091766357422, "learning_rate": 1.86e-07, "loss": 8.2015, "step": 62 }, { "epoch": 0.11634349030470914, "grad_norm": 22.69646453857422, "learning_rate": 1.89e-07, "loss": 8.3655, "step": 63 }, { "epoch": 0.11819021237303785, "grad_norm": 22.68636131286621, "learning_rate": 1.9200000000000003e-07, "loss": 8.1947, "step": 64 }, { "epoch": 0.12003693444136658, "grad_norm": 25.018808364868164, "learning_rate": 1.9499999999999999e-07, "loss": 8.832, "step": 65 }, { "epoch": 0.12188365650969529, "grad_norm": 27.025386810302734, "learning_rate": 1.98e-07, "loss": 9.3476, "step": 66 }, { "epoch": 0.123730378578024, "grad_norm": 27.390701293945312, "learning_rate": 2.01e-07, "loss": 9.5083, "step": 67 }, { "epoch": 0.12557710064635272, "grad_norm": 28.93164825439453, "learning_rate": 2.04e-07, "loss": 9.9266, "step": 68 }, { "epoch": 0.12742382271468145, "grad_norm": 29.568239212036133, "learning_rate": 2.0700000000000001e-07, "loss": 9.9968, "step": 69 }, { "epoch": 0.12927054478301014, "grad_norm": 31.310028076171875, "learning_rate": 2.1e-07, "loss": 10.2074, "step": 70 }, { "epoch": 0.13111726685133887, "grad_norm": 29.128494262695312, "learning_rate": 2.1300000000000001e-07, "loss": 9.6556, "step": 71 }, { "epoch": 0.1329639889196676, "grad_norm": 29.401884078979492, "learning_rate": 2.16e-07, "loss": 9.7587, "step": 72 }, { "epoch": 0.1348107109879963, "grad_norm": 29.164548873901367, "learning_rate": 2.1900000000000002e-07, "loss": 9.6016, "step": 73 }, { "epoch": 0.13665743305632502, "grad_norm": 29.890323638916016, "learning_rate": 2.22e-07, "loss": 9.6611, "step": 74 }, { "epoch": 0.13850415512465375, "grad_norm": 30.824186325073242, "learning_rate": 2.25e-07, "loss": 9.8628, "step": 75 }, { "epoch": 0.14035087719298245, "grad_norm": 30.39980125427246, "learning_rate": 2.28e-07, "loss": 9.7659, "step": 76 }, { "epoch": 0.14219759926131118, "grad_norm": 28.811660766601562, "learning_rate": 2.3100000000000002e-07, "loss": 
9.4846, "step": 77 }, { "epoch": 0.1440443213296399, "grad_norm": 30.074052810668945, "learning_rate": 2.34e-07, "loss": 9.6802, "step": 78 }, { "epoch": 0.1458910433979686, "grad_norm": 30.820892333984375, "learning_rate": 2.3700000000000002e-07, "loss": 9.5901, "step": 79 }, { "epoch": 0.14773776546629733, "grad_norm": 28.861011505126953, "learning_rate": 2.4000000000000003e-07, "loss": 9.3774, "step": 80 }, { "epoch": 0.14958448753462603, "grad_norm": 29.80718231201172, "learning_rate": 2.43e-07, "loss": 9.5449, "step": 81 }, { "epoch": 0.15143120960295475, "grad_norm": 29.73357391357422, "learning_rate": 2.46e-07, "loss": 9.2557, "step": 82 }, { "epoch": 0.15327793167128348, "grad_norm": 29.006399154663086, "learning_rate": 2.49e-07, "loss": 9.2959, "step": 83 }, { "epoch": 0.15512465373961218, "grad_norm": 31.09878921508789, "learning_rate": 2.52e-07, "loss": 9.5172, "step": 84 }, { "epoch": 0.1569713758079409, "grad_norm": 30.797834396362305, "learning_rate": 2.5500000000000005e-07, "loss": 9.4964, "step": 85 }, { "epoch": 0.15881809787626963, "grad_norm": 30.867095947265625, "learning_rate": 2.58e-07, "loss": 9.4663, "step": 86 }, { "epoch": 0.16066481994459833, "grad_norm": 30.52632713317871, "learning_rate": 2.6099999999999997e-07, "loss": 9.1204, "step": 87 }, { "epoch": 0.16251154201292706, "grad_norm": 30.950654983520508, "learning_rate": 2.6400000000000003e-07, "loss": 9.2421, "step": 88 }, { "epoch": 0.16435826408125578, "grad_norm": 31.74310302734375, "learning_rate": 2.67e-07, "loss": 9.2844, "step": 89 }, { "epoch": 0.16620498614958448, "grad_norm": 31.408519744873047, "learning_rate": 2.7e-07, "loss": 9.2126, "step": 90 }, { "epoch": 0.1680517082179132, "grad_norm": 31.419960021972656, "learning_rate": 2.73e-07, "loss": 9.2066, "step": 91 }, { "epoch": 0.1698984302862419, "grad_norm": 30.93468475341797, "learning_rate": 2.76e-07, "loss": 9.0334, "step": 92 }, { "epoch": 0.17174515235457063, "grad_norm": 31.12645721435547, "learning_rate": 
2.79e-07, "loss": 8.8968, "step": 93 }, { "epoch": 0.17359187442289936, "grad_norm": 30.42648696899414, "learning_rate": 2.82e-07, "loss": 8.8951, "step": 94 }, { "epoch": 0.17543859649122806, "grad_norm": 29.975467681884766, "learning_rate": 2.85e-07, "loss": 8.8357, "step": 95 }, { "epoch": 0.1772853185595568, "grad_norm": 30.835067749023438, "learning_rate": 2.88e-07, "loss": 8.7768, "step": 96 }, { "epoch": 0.1791320406278855, "grad_norm": 30.060455322265625, "learning_rate": 2.91e-07, "loss": 8.7634, "step": 97 }, { "epoch": 0.1809787626962142, "grad_norm": 30.031410217285156, "learning_rate": 2.94e-07, "loss": 8.5461, "step": 98 }, { "epoch": 0.18282548476454294, "grad_norm": 31.30252456665039, "learning_rate": 2.97e-07, "loss": 8.5539, "step": 99 }, { "epoch": 0.18467220683287167, "grad_norm": 29.008281707763672, "learning_rate": 3.0000000000000004e-07, "loss": 8.2293, "step": 100 }, { "epoch": 0.18651892890120036, "grad_norm": 32.37430191040039, "learning_rate": 3.03e-07, "loss": 9.5889, "step": 101 }, { "epoch": 0.1883656509695291, "grad_norm": 25.16545295715332, "learning_rate": 3.06e-07, "loss": 7.8944, "step": 102 }, { "epoch": 0.1902123730378578, "grad_norm": 23.66377067565918, "learning_rate": 3.0900000000000003e-07, "loss": 7.4857, "step": 103 }, { "epoch": 0.19205909510618652, "grad_norm": 24.09159278869629, "learning_rate": 3.12e-07, "loss": 7.511, "step": 104 }, { "epoch": 0.19390581717451524, "grad_norm": 23.55128288269043, "learning_rate": 3.15e-07, "loss": 7.3832, "step": 105 }, { "epoch": 0.19575253924284394, "grad_norm": 24.18902587890625, "learning_rate": 3.18e-07, "loss": 7.4328, "step": 106 }, { "epoch": 0.19759926131117267, "grad_norm": 23.721189498901367, "learning_rate": 3.21e-07, "loss": 7.3789, "step": 107 }, { "epoch": 0.1994459833795014, "grad_norm": 25.13196563720703, "learning_rate": 3.2400000000000004e-07, "loss": 7.4585, "step": 108 }, { "epoch": 0.2012927054478301, "grad_norm": 23.71413230895996, "learning_rate": 3.27e-07, 
"loss": 7.1487, "step": 109 }, { "epoch": 0.20313942751615882, "grad_norm": 24.34363555908203, "learning_rate": 3.2999999999999996e-07, "loss": 7.1516, "step": 110 }, { "epoch": 0.20498614958448755, "grad_norm": 24.962244033813477, "learning_rate": 3.3300000000000003e-07, "loss": 7.2355, "step": 111 }, { "epoch": 0.20683287165281625, "grad_norm": 25.698650360107422, "learning_rate": 3.36e-07, "loss": 7.3704, "step": 112 }, { "epoch": 0.20867959372114497, "grad_norm": 29.467025756835938, "learning_rate": 3.39e-07, "loss": 7.9443, "step": 113 }, { "epoch": 0.21052631578947367, "grad_norm": 28.43014907836914, "learning_rate": 3.42e-07, "loss": 7.7096, "step": 114 }, { "epoch": 0.2123730378578024, "grad_norm": 33.474647521972656, "learning_rate": 3.45e-07, "loss": 8.204, "step": 115 }, { "epoch": 0.21421975992613113, "grad_norm": null, "learning_rate": 3.45e-07, "loss": 8.3935, "step": 116 }, { "epoch": 0.21606648199445982, "grad_norm": 33.81159591674805, "learning_rate": 3.48e-07, "loss": 8.3841, "step": 117 }, { "epoch": 0.21791320406278855, "grad_norm": 35.76697540283203, "learning_rate": 3.51e-07, "loss": 8.526, "step": 118 }, { "epoch": 0.21975992613111728, "grad_norm": 37.332489013671875, "learning_rate": 3.54e-07, "loss": 8.5882, "step": 119 }, { "epoch": 0.22160664819944598, "grad_norm": 38.213417053222656, "learning_rate": 3.5700000000000003e-07, "loss": 8.5769, "step": 120 }, { "epoch": 0.2234533702677747, "grad_norm": 37.23543167114258, "learning_rate": 3.6e-07, "loss": 8.5438, "step": 121 }, { "epoch": 0.22530009233610343, "grad_norm": 41.91793441772461, "learning_rate": 3.63e-07, "loss": 8.8844, "step": 122 }, { "epoch": 0.22714681440443213, "grad_norm": 38.34842300415039, "learning_rate": 3.66e-07, "loss": 8.4606, "step": 123 }, { "epoch": 0.22899353647276086, "grad_norm": 43.13532638549805, "learning_rate": 3.6900000000000004e-07, "loss": 8.8698, "step": 124 }, { "epoch": 0.23084025854108955, "grad_norm": 39.65250015258789, "learning_rate": 3.72e-07, 
"loss": 8.4624, "step": 125 }, { "epoch": 0.23268698060941828, "grad_norm": 39.97956848144531, "learning_rate": 3.75e-07, "loss": 8.4699, "step": 126 }, { "epoch": 0.234533702677747, "grad_norm": 39.858314514160156, "learning_rate": 3.78e-07, "loss": 8.4046, "step": 127 }, { "epoch": 0.2363804247460757, "grad_norm": 40.01725387573242, "learning_rate": 3.81e-07, "loss": 8.3207, "step": 128 }, { "epoch": 0.23822714681440443, "grad_norm": 38.98045349121094, "learning_rate": 3.8400000000000005e-07, "loss": 8.0651, "step": 129 }, { "epoch": 0.24007386888273316, "grad_norm": 41.58202362060547, "learning_rate": 3.87e-07, "loss": 8.1929, "step": 130 }, { "epoch": 0.24192059095106186, "grad_norm": 41.836204528808594, "learning_rate": 3.8999999999999997e-07, "loss": 8.0894, "step": 131 }, { "epoch": 0.24376731301939059, "grad_norm": 40.90363693237305, "learning_rate": 3.9300000000000004e-07, "loss": 7.9278, "step": 132 }, { "epoch": 0.24561403508771928, "grad_norm": 44.515872955322266, "learning_rate": 3.96e-07, "loss": 8.1413, "step": 133 }, { "epoch": 0.247460757156048, "grad_norm": 42.47338104248047, "learning_rate": 3.99e-07, "loss": 8.0039, "step": 134 }, { "epoch": 0.24930747922437674, "grad_norm": 44.11112594604492, "learning_rate": 4.02e-07, "loss": 8.0041, "step": 135 }, { "epoch": 0.25115420129270544, "grad_norm": 46.43265914916992, "learning_rate": 4.05e-07, "loss": 8.0428, "step": 136 }, { "epoch": 0.2530009233610342, "grad_norm": 45.253013610839844, "learning_rate": 4.08e-07, "loss": 8.0573, "step": 137 }, { "epoch": 0.2548476454293629, "grad_norm": 43.769744873046875, "learning_rate": 4.11e-07, "loss": 7.8205, "step": 138 }, { "epoch": 0.2566943674976916, "grad_norm": 46.40185546875, "learning_rate": 4.1400000000000003e-07, "loss": 7.7984, "step": 139 }, { "epoch": 0.2585410895660203, "grad_norm": 44.123138427734375, "learning_rate": 4.17e-07, "loss": 7.5095, "step": 140 }, { "epoch": 0.26038781163434904, "grad_norm": 47.95862579345703, "learning_rate": 
4.2e-07, "loss": 7.7006, "step": 141 }, { "epoch": 0.26223453370267774, "grad_norm": 43.312129974365234, "learning_rate": 4.23e-07, "loss": 7.2459, "step": 142 }, { "epoch": 0.26408125577100644, "grad_norm": 45.633419036865234, "learning_rate": 4.2600000000000003e-07, "loss": 7.3567, "step": 143 }, { "epoch": 0.2659279778393352, "grad_norm": 43.2322998046875, "learning_rate": 4.2900000000000004e-07, "loss": 7.2379, "step": 144 }, { "epoch": 0.2677746999076639, "grad_norm": 41.97365188598633, "learning_rate": 4.32e-07, "loss": 6.94, "step": 145 }, { "epoch": 0.2696214219759926, "grad_norm": 45.6364860534668, "learning_rate": 4.35e-07, "loss": 7.1133, "step": 146 }, { "epoch": 0.27146814404432135, "grad_norm": 44.61563491821289, "learning_rate": 4.3800000000000003e-07, "loss": 6.9913, "step": 147 }, { "epoch": 0.27331486611265005, "grad_norm": 43.38426208496094, "learning_rate": 4.41e-07, "loss": 6.8122, "step": 148 }, { "epoch": 0.27516158818097874, "grad_norm": 44.04775619506836, "learning_rate": 4.44e-07, "loss": 6.7077, "step": 149 }, { "epoch": 0.2770083102493075, "grad_norm": 41.649017333984375, "learning_rate": 4.47e-07, "loss": 6.4878, "step": 150 }, { "epoch": 0.2788550323176362, "grad_norm": 38.133544921875, "learning_rate": 4.5e-07, "loss": 6.4685, "step": 151 }, { "epoch": 0.2807017543859649, "grad_norm": 39.247467041015625, "learning_rate": 4.5300000000000005e-07, "loss": 6.4852, "step": 152 }, { "epoch": 0.28254847645429365, "grad_norm": 36.58827590942383, "learning_rate": 4.56e-07, "loss": 6.2618, "step": 153 }, { "epoch": 0.28439519852262235, "grad_norm": 33.99684524536133, "learning_rate": 4.5899999999999997e-07, "loss": 6.0097, "step": 154 }, { "epoch": 0.28624192059095105, "grad_norm": 37.434478759765625, "learning_rate": 4.6200000000000003e-07, "loss": 6.1671, "step": 155 }, { "epoch": 0.2880886426592798, "grad_norm": 35.457984924316406, "learning_rate": 4.65e-07, "loss": 5.9534, "step": 156 }, { "epoch": 0.2899353647276085, "grad_norm": 
37.76537322998047, "learning_rate": 4.68e-07, "loss": 6.0054, "step": 157 }, { "epoch": 0.2917820867959372, "grad_norm": 34.144412994384766, "learning_rate": 4.7099999999999997e-07, "loss": 5.7492, "step": 158 }, { "epoch": 0.29362880886426596, "grad_norm": 35.62531280517578, "learning_rate": 4.7400000000000004e-07, "loss": 5.7278, "step": 159 }, { "epoch": 0.29547553093259465, "grad_norm": 36.27285385131836, "learning_rate": 4.77e-07, "loss": 5.7173, "step": 160 }, { "epoch": 0.29732225300092335, "grad_norm": 36.02254104614258, "learning_rate": 4.800000000000001e-07, "loss": 5.5855, "step": 161 }, { "epoch": 0.29916897506925205, "grad_norm": 37.09486770629883, "learning_rate": 4.83e-07, "loss": 5.663, "step": 162 }, { "epoch": 0.3010156971375808, "grad_norm": 37.96799087524414, "learning_rate": 4.86e-07, "loss": 5.6351, "step": 163 }, { "epoch": 0.3028624192059095, "grad_norm": 40.50882339477539, "learning_rate": 4.89e-07, "loss": 5.6276, "step": 164 }, { "epoch": 0.3047091412742382, "grad_norm": 41.79549789428711, "learning_rate": 4.92e-07, "loss": 5.6743, "step": 165 }, { "epoch": 0.30655586334256696, "grad_norm": 43.414180755615234, "learning_rate": 4.95e-07, "loss": 5.7052, "step": 166 }, { "epoch": 0.30840258541089566, "grad_norm": 43.84006118774414, "learning_rate": 4.98e-07, "loss": 5.6869, "step": 167 }, { "epoch": 0.31024930747922436, "grad_norm": 46.30332565307617, "learning_rate": 5.01e-07, "loss": 5.7381, "step": 168 }, { "epoch": 0.3120960295475531, "grad_norm": 44.577213287353516, "learning_rate": 5.04e-07, "loss": 5.6472, "step": 169 }, { "epoch": 0.3139427516158818, "grad_norm": 44.917484283447266, "learning_rate": 5.07e-07, "loss": 5.6299, "step": 170 }, { "epoch": 0.3157894736842105, "grad_norm": 47.360355377197266, "learning_rate": 5.100000000000001e-07, "loss": 5.6951, "step": 171 }, { "epoch": 0.31763619575253926, "grad_norm": 41.15222930908203, "learning_rate": 5.13e-07, "loss": 5.4645, "step": 172 }, { "epoch": 0.31948291782086796, 
"grad_norm": 42.47678756713867, "learning_rate": 5.16e-07, "loss": 5.4574, "step": 173 }, { "epoch": 0.32132963988919666, "grad_norm": 40.974754333496094, "learning_rate": 5.19e-07, "loss": 5.3821, "step": 174 }, { "epoch": 0.3231763619575254, "grad_norm": 37.154075622558594, "learning_rate": 5.219999999999999e-07, "loss": 5.2527, "step": 175 }, { "epoch": 0.3250230840258541, "grad_norm": 34.92766571044922, "learning_rate": 5.250000000000001e-07, "loss": 5.1485, "step": 176 }, { "epoch": 0.3268698060941828, "grad_norm": 33.639408111572266, "learning_rate": 5.280000000000001e-07, "loss": 5.1358, "step": 177 }, { "epoch": 0.32871652816251157, "grad_norm": 31.7966365814209, "learning_rate": 5.31e-07, "loss": 5.0301, "step": 178 }, { "epoch": 0.33056325023084027, "grad_norm": 32.888492584228516, "learning_rate": 5.34e-07, "loss": 5.0694, "step": 179 }, { "epoch": 0.33240997229916897, "grad_norm": 28.766515731811523, "learning_rate": 5.37e-07, "loss": 4.9529, "step": 180 }, { "epoch": 0.33425669436749766, "grad_norm": 27.88300323486328, "learning_rate": 5.4e-07, "loss": 4.928, "step": 181 }, { "epoch": 0.3361034164358264, "grad_norm": 26.481048583984375, "learning_rate": 5.43e-07, "loss": 4.9091, "step": 182 }, { "epoch": 0.3379501385041551, "grad_norm": 23.27083969116211, "learning_rate": 5.46e-07, "loss": 4.86, "step": 183 }, { "epoch": 0.3397968605724838, "grad_norm": 21.233184814453125, "learning_rate": 5.490000000000001e-07, "loss": 4.7144, "step": 184 }, { "epoch": 0.34164358264081257, "grad_norm": 18.95135498046875, "learning_rate": 5.52e-07, "loss": 4.7257, "step": 185 }, { "epoch": 0.34349030470914127, "grad_norm": 16.528169631958008, "learning_rate": 5.55e-07, "loss": 4.7432, "step": 186 }, { "epoch": 0.34533702677746997, "grad_norm": 14.118398666381836, "learning_rate": 5.58e-07, "loss": 4.6586, "step": 187 }, { "epoch": 0.3471837488457987, "grad_norm": 11.961546897888184, "learning_rate": 5.61e-07, "loss": 4.5834, "step": 188 }, { "epoch": 
0.3490304709141274, "grad_norm": 10.75606918334961, "learning_rate": 5.64e-07, "loss": 4.6245, "step": 189 }, { "epoch": 0.3508771929824561, "grad_norm": 8.708870887756348, "learning_rate": 5.67e-07, "loss": 4.5646, "step": 190 }, { "epoch": 0.3527239150507849, "grad_norm": 7.9037346839904785, "learning_rate": 5.7e-07, "loss": 4.4985, "step": 191 }, { "epoch": 0.3545706371191136, "grad_norm": 7.6336259841918945, "learning_rate": 5.73e-07, "loss": 4.549, "step": 192 }, { "epoch": 0.3564173591874423, "grad_norm": 8.644391059875488, "learning_rate": 5.76e-07, "loss": 4.5836, "step": 193 }, { "epoch": 0.358264081255771, "grad_norm": 8.554842948913574, "learning_rate": 5.790000000000001e-07, "loss": 4.5207, "step": 194 }, { "epoch": 0.3601108033240997, "grad_norm": 9.063313484191895, "learning_rate": 5.82e-07, "loss": 4.4461, "step": 195 }, { "epoch": 0.3619575253924284, "grad_norm": 10.217832565307617, "learning_rate": 5.85e-07, "loss": 4.5165, "step": 196 }, { "epoch": 0.3638042474607572, "grad_norm": 10.64183235168457, "learning_rate": 5.88e-07, "loss": 4.4659, "step": 197 }, { "epoch": 0.3656509695290859, "grad_norm": 11.097018241882324, "learning_rate": 5.909999999999999e-07, "loss": 4.4518, "step": 198 }, { "epoch": 0.3674976915974146, "grad_norm": 11.132061958312988, "learning_rate": 5.94e-07, "loss": 4.4149, "step": 199 }, { "epoch": 0.36934441366574333, "grad_norm": 12.801709175109863, "learning_rate": 5.970000000000001e-07, "loss": 4.4589, "step": 200 }, { "epoch": 0.37119113573407203, "grad_norm": 7.300925254821777, "learning_rate": 6.000000000000001e-07, "loss": 4.3065, "step": 201 }, { "epoch": 0.37303785780240073, "grad_norm": 6.761147975921631, "learning_rate": 6.03e-07, "loss": 4.2383, "step": 202 }, { "epoch": 0.37488457987072943, "grad_norm": 7.358983516693115, "learning_rate": 6.06e-07, "loss": 4.1914, "step": 203 }, { "epoch": 0.3767313019390582, "grad_norm": 6.69858455657959, "learning_rate": 6.09e-07, "loss": 4.2535, "step": 204 }, { "epoch": 
0.3785780240073869, "grad_norm": 7.087483882904053, "learning_rate": 6.12e-07, "loss": 4.1431, "step": 205 }, { "epoch": 0.3804247460757156, "grad_norm": 7.197114944458008, "learning_rate": 6.15e-07, "loss": 4.1343, "step": 206 }, { "epoch": 0.38227146814404434, "grad_norm": 7.602322101593018, "learning_rate": 6.180000000000001e-07, "loss": 4.0637, "step": 207 }, { "epoch": 0.38411819021237303, "grad_norm": 7.007232666015625, "learning_rate": 6.21e-07, "loss": 4.0418, "step": 208 }, { "epoch": 0.38596491228070173, "grad_norm": 6.680031776428223, "learning_rate": 6.24e-07, "loss": 4.0599, "step": 209 }, { "epoch": 0.3878116343490305, "grad_norm": 6.525915145874023, "learning_rate": 6.27e-07, "loss": 4.004, "step": 210 }, { "epoch": 0.3896583564173592, "grad_norm": 6.130704879760742, "learning_rate": 6.3e-07, "loss": 4.0534, "step": 211 }, { "epoch": 0.3915050784856879, "grad_norm": 5.882370471954346, "learning_rate": 6.33e-07, "loss": 4.0821, "step": 212 }, { "epoch": 0.39335180055401664, "grad_norm": 5.746700763702393, "learning_rate": 6.36e-07, "loss": 4.1033, "step": 213 }, { "epoch": 0.39519852262234534, "grad_norm": 5.587550640106201, "learning_rate": 6.39e-07, "loss": 4.0979, "step": 214 }, { "epoch": 0.39704524469067404, "grad_norm": 5.65580415725708, "learning_rate": 6.42e-07, "loss": 4.1246, "step": 215 }, { "epoch": 0.3988919667590028, "grad_norm": 5.67427921295166, "learning_rate": 6.45e-07, "loss": 4.1217, "step": 216 }, { "epoch": 0.4007386888273315, "grad_norm": 6.284714698791504, "learning_rate": 6.480000000000001e-07, "loss": 4.2012, "step": 217 }, { "epoch": 0.4025854108956602, "grad_norm": 5.949926376342773, "learning_rate": 6.51e-07, "loss": 4.1859, "step": 218 }, { "epoch": 0.40443213296398894, "grad_norm": 6.066436767578125, "learning_rate": 6.54e-07, "loss": 4.1414, "step": 219 }, { "epoch": 0.40627885503231764, "grad_norm": 6.133418083190918, "learning_rate": 6.57e-07, "loss": 4.1737, "step": 220 }, { "epoch": 0.40812557710064634, "grad_norm": 
5.733587265014648, "learning_rate": 6.599999999999999e-07, "loss": 4.1053, "step": 221 }, { "epoch": 0.4099722991689751, "grad_norm": 5.346182823181152, "learning_rate": 6.63e-07, "loss": 4.0954, "step": 222 }, { "epoch": 0.4118190212373038, "grad_norm": 5.801249980926514, "learning_rate": 6.660000000000001e-07, "loss": 4.1524, "step": 223 }, { "epoch": 0.4136657433056325, "grad_norm": 5.427605628967285, "learning_rate": 6.690000000000001e-07, "loss": 4.1132, "step": 224 }, { "epoch": 0.4155124653739612, "grad_norm": 5.082065582275391, "learning_rate": 6.72e-07, "loss": 4.0983, "step": 225 }, { "epoch": 0.41735918744228995, "grad_norm": 5.642475605010986, "learning_rate": 6.75e-07, "loss": 4.089, "step": 226 }, { "epoch": 0.41920590951061865, "grad_norm": 5.176789283752441, "learning_rate": 6.78e-07, "loss": 4.0512, "step": 227 }, { "epoch": 0.42105263157894735, "grad_norm": 5.5746541023254395, "learning_rate": 6.81e-07, "loss": 4.0565, "step": 228 }, { "epoch": 0.4228993536472761, "grad_norm": 5.4985032081604, "learning_rate": 6.84e-07, "loss": 4.0774, "step": 229 }, { "epoch": 0.4247460757156048, "grad_norm": 5.544588565826416, "learning_rate": 6.87e-07, "loss": 4.0619, "step": 230 }, { "epoch": 0.4265927977839335, "grad_norm": 5.085567474365234, "learning_rate": 6.9e-07, "loss": 3.9928, "step": 231 }, { "epoch": 0.42843951985226225, "grad_norm": 5.949645519256592, "learning_rate": 6.93e-07, "loss": 4.0557, "step": 232 }, { "epoch": 0.43028624192059095, "grad_norm": 5.037247657775879, "learning_rate": 6.96e-07, "loss": 4.005, "step": 233 }, { "epoch": 0.43213296398891965, "grad_norm": 5.97141695022583, "learning_rate": 6.990000000000001e-07, "loss": 4.0016, "step": 234 }, { "epoch": 0.4339796860572484, "grad_norm": 4.68813943862915, "learning_rate": 7.02e-07, "loss": 3.9743, "step": 235 }, { "epoch": 0.4358264081255771, "grad_norm": 14.047794342041016, "learning_rate": 7.05e-07, "loss": 3.9354, "step": 236 }, { "epoch": 0.4376731301939058, "grad_norm": 
5.274556636810303, "learning_rate": 7.08e-07, "loss": 4.0027, "step": 237 }, { "epoch": 0.43951985226223456, "grad_norm": 4.5572733879089355, "learning_rate": 7.11e-07, "loss": 3.9243, "step": 238 }, { "epoch": 0.44136657433056325, "grad_norm": 4.318416118621826, "learning_rate": 7.140000000000001e-07, "loss": 3.889, "step": 239 }, { "epoch": 0.44321329639889195, "grad_norm": 4.351585388183594, "learning_rate": 7.170000000000001e-07, "loss": 3.9835, "step": 240 }, { "epoch": 0.4450600184672207, "grad_norm": 4.199488639831543, "learning_rate": 7.2e-07, "loss": 3.9077, "step": 241 }, { "epoch": 0.4469067405355494, "grad_norm": 4.0089898109436035, "learning_rate": 7.23e-07, "loss": 3.8684, "step": 242 }, { "epoch": 0.4487534626038781, "grad_norm": 4.100970268249512, "learning_rate": 7.26e-07, "loss": 3.8637, "step": 243 }, { "epoch": 0.45060018467220686, "grad_norm": 4.072782516479492, "learning_rate": 7.29e-07, "loss": 3.8156, "step": 244 }, { "epoch": 0.45244690674053556, "grad_norm": 4.012927532196045, "learning_rate": 7.32e-07, "loss": 3.8037, "step": 245 }, { "epoch": 0.45429362880886426, "grad_norm": 6.060505390167236, "learning_rate": 7.350000000000001e-07, "loss": 3.9028, "step": 246 }, { "epoch": 0.45614035087719296, "grad_norm": 4.179002285003662, "learning_rate": 7.380000000000001e-07, "loss": 3.8943, "step": 247 }, { "epoch": 0.4579870729455217, "grad_norm": 5.213497638702393, "learning_rate": 7.41e-07, "loss": 3.8375, "step": 248 }, { "epoch": 0.4598337950138504, "grad_norm": 4.26724910736084, "learning_rate": 7.44e-07, "loss": 3.7996, "step": 249 }, { "epoch": 0.4616805170821791, "grad_norm": 12.426194190979004, "learning_rate": 7.47e-07, "loss": 3.8624, "step": 250 }, { "epoch": 0.46352723915050786, "grad_norm": 4.382394790649414, "learning_rate": 7.5e-07, "loss": 3.7192, "step": 251 }, { "epoch": 0.46537396121883656, "grad_norm": 4.744300365447998, "learning_rate": 7.53e-07, "loss": 3.6259, "step": 252 }, { "epoch": 0.46722068328716526, "grad_norm": 
4.654346942901611, "learning_rate": 7.56e-07, "loss": 3.5965, "step": 253 }, { "epoch": 0.469067405355494, "grad_norm": 3.6834309101104736, "learning_rate": 7.59e-07, "loss": 3.5887, "step": 254 }, { "epoch": 0.4709141274238227, "grad_norm": 4.092491149902344, "learning_rate": 7.62e-07, "loss": 3.5426, "step": 255 }, { "epoch": 0.4727608494921514, "grad_norm": 4.001455783843994, "learning_rate": 7.65e-07, "loss": 3.5544, "step": 256 }, { "epoch": 0.47460757156048017, "grad_norm": 5.414680480957031, "learning_rate": 7.680000000000001e-07, "loss": 3.4742, "step": 257 }, { "epoch": 0.47645429362880887, "grad_norm": 3.3546714782714844, "learning_rate": 7.71e-07, "loss": 3.4772, "step": 258 }, { "epoch": 0.47830101569713757, "grad_norm": 3.5425257682800293, "learning_rate": 7.74e-07, "loss": 3.4811, "step": 259 }, { "epoch": 0.4801477377654663, "grad_norm": 3.1755106449127197, "learning_rate": 7.77e-07, "loss": 3.4188, "step": 260 }, { "epoch": 0.481994459833795, "grad_norm": 4.707540988922119, "learning_rate": 7.799999999999999e-07, "loss": 3.3899, "step": 261 }, { "epoch": 0.4838411819021237, "grad_norm": 3.285004138946533, "learning_rate": 7.830000000000001e-07, "loss": 3.4876, "step": 262 }, { "epoch": 0.4856879039704525, "grad_norm": 4.9399638175964355, "learning_rate": 7.860000000000001e-07, "loss": 3.4514, "step": 263 }, { "epoch": 0.48753462603878117, "grad_norm": 3.9122636318206787, "learning_rate": 7.89e-07, "loss": 3.476, "step": 264 }, { "epoch": 0.48938134810710987, "grad_norm": 5.649412155151367, "learning_rate": 7.92e-07, "loss": 3.5428, "step": 265 }, { "epoch": 0.49122807017543857, "grad_norm": 14.08123779296875, "learning_rate": 7.95e-07, "loss": 3.5073, "step": 266 }, { "epoch": 0.4930747922437673, "grad_norm": 4.978379726409912, "learning_rate": 7.98e-07, "loss": 3.5416, "step": 267 }, { "epoch": 0.494921514312096, "grad_norm": 4.730838298797607, "learning_rate": 8.01e-07, "loss": 3.5272, "step": 268 }, { "epoch": 0.4967682363804247, "grad_norm": 
4.087198257446289, "learning_rate": 8.04e-07, "loss": 3.4973, "step": 269 }, { "epoch": 0.4986149584487535, "grad_norm": 4.161842346191406, "learning_rate": 8.070000000000001e-07, "loss": 3.5052, "step": 270 }, { "epoch": 0.5004616805170822, "grad_norm": 5.521448135375977, "learning_rate": 8.1e-07, "loss": 3.5112, "step": 271 }, { "epoch": 0.5023084025854109, "grad_norm": 3.795762062072754, "learning_rate": 8.13e-07, "loss": 3.4638, "step": 272 }, { "epoch": 0.5041551246537396, "grad_norm": 4.849123001098633, "learning_rate": 8.16e-07, "loss": 3.4319, "step": 273 }, { "epoch": 0.5060018467220684, "grad_norm": 4.427425861358643, "learning_rate": 8.19e-07, "loss": 3.4609, "step": 274 }, { "epoch": 0.5078485687903971, "grad_norm": 4.559118747711182, "learning_rate": 8.22e-07, "loss": 3.4332, "step": 275 }, { "epoch": 0.5096952908587258, "grad_norm": 5.236905574798584, "learning_rate": 8.25e-07, "loss": 3.4064, "step": 276 }, { "epoch": 0.5115420129270545, "grad_norm": 6.23512077331543, "learning_rate": 8.280000000000001e-07, "loss": 3.4709, "step": 277 }, { "epoch": 0.5133887349953832, "grad_norm": 5.505606174468994, "learning_rate": 8.31e-07, "loss": 3.4046, "step": 278 }, { "epoch": 0.5152354570637119, "grad_norm": 15.238096237182617, "learning_rate": 8.34e-07, "loss": 3.3852, "step": 279 }, { "epoch": 0.5170821791320406, "grad_norm": 7.080745697021484, "learning_rate": 8.370000000000001e-07, "loss": 3.3836, "step": 280 }, { "epoch": 0.5189289012003694, "grad_norm": 4.308071136474609, "learning_rate": 8.4e-07, "loss": 3.3356, "step": 281 }, { "epoch": 0.5207756232686981, "grad_norm": 5.031737327575684, "learning_rate": 8.43e-07, "loss": 3.3544, "step": 282 }, { "epoch": 0.5226223453370268, "grad_norm": 4.0981268882751465, "learning_rate": 8.46e-07, "loss": 3.3315, "step": 283 }, { "epoch": 0.5244690674053555, "grad_norm": 8.215164184570312, "learning_rate": 8.489999999999999e-07, "loss": 3.3009, "step": 284 }, { "epoch": 0.5263157894736842, "grad_norm": 
4.3822245597839355, "learning_rate": 8.520000000000001e-07, "loss": 3.3268, "step": 285 }, { "epoch": 0.5281625115420129, "grad_norm": 3.282374382019043, "learning_rate": 8.550000000000001e-07, "loss": 3.2991, "step": 286 }, { "epoch": 0.5300092336103417, "grad_norm": 5.968320846557617, "learning_rate": 8.580000000000001e-07, "loss": 3.2981, "step": 287 }, { "epoch": 0.5318559556786704, "grad_norm": 10.67861270904541, "learning_rate": 8.61e-07, "loss": 3.2595, "step": 288 }, { "epoch": 0.5337026777469991, "grad_norm": 3.8899168968200684, "learning_rate": 8.64e-07, "loss": 3.2271, "step": 289 }, { "epoch": 0.5355493998153278, "grad_norm": 4.796634674072266, "learning_rate": 8.67e-07, "loss": 3.3094, "step": 290 }, { "epoch": 0.5373961218836565, "grad_norm": 4.089239120483398, "learning_rate": 8.7e-07, "loss": 3.2095, "step": 291 }, { "epoch": 0.5392428439519852, "grad_norm": 5.146942615509033, "learning_rate": 8.73e-07, "loss": 3.2382, "step": 292 }, { "epoch": 0.541089566020314, "grad_norm": 5.682231903076172, "learning_rate": 8.760000000000001e-07, "loss": 3.2271, "step": 293 }, { "epoch": 0.5429362880886427, "grad_norm": 4.716771602630615, "learning_rate": 8.79e-07, "loss": 3.2294, "step": 294 }, { "epoch": 0.5447830101569714, "grad_norm": 4.983368873596191, "learning_rate": 8.82e-07, "loss": 3.2358, "step": 295 }, { "epoch": 0.5466297322253001, "grad_norm": 7.215044975280762, "learning_rate": 8.85e-07, "loss": 3.2556, "step": 296 }, { "epoch": 0.5484764542936288, "grad_norm": 4.684432506561279, "learning_rate": 8.88e-07, "loss": 3.2566, "step": 297 }, { "epoch": 0.5503231763619575, "grad_norm": 4.458353519439697, "learning_rate": 8.91e-07, "loss": 3.2236, "step": 298 }, { "epoch": 0.5521698984302862, "grad_norm": 8.782550811767578, "learning_rate": 8.94e-07, "loss": 3.2065, "step": 299 }, { "epoch": 0.554016620498615, "grad_norm": 12.405173301696777, "learning_rate": 8.97e-07, "loss": 3.1937, "step": 300 }, { "epoch": 0.5558633425669437, "grad_norm": 
12.274544715881348, "learning_rate": 9e-07, "loss": 3.3293, "step": 301 }, { "epoch": 0.5577100646352724, "grad_norm": 10.548261642456055, "learning_rate": 9.03e-07, "loss": 3.2509, "step": 302 }, { "epoch": 0.5595567867036011, "grad_norm": 7.260283946990967, "learning_rate": 9.060000000000001e-07, "loss": 3.1922, "step": 303 }, { "epoch": 0.5614035087719298, "grad_norm": 6.222684860229492, "learning_rate": 9.09e-07, "loss": 3.2229, "step": 304 }, { "epoch": 0.5632502308402585, "grad_norm": 3.192690849304199, "learning_rate": 9.12e-07, "loss": 3.2094, "step": 305 }, { "epoch": 0.5650969529085873, "grad_norm": 4.268246173858643, "learning_rate": 9.15e-07, "loss": 3.1659, "step": 306 }, { "epoch": 0.566943674976916, "grad_norm": 5.622379779815674, "learning_rate": 9.179999999999999e-07, "loss": 3.1662, "step": 307 }, { "epoch": 0.5687903970452447, "grad_norm": 5.32727575302124, "learning_rate": 9.210000000000001e-07, "loss": 3.1983, "step": 308 }, { "epoch": 0.5706371191135734, "grad_norm": 4.53847599029541, "learning_rate": 9.240000000000001e-07, "loss": 3.2035, "step": 309 }, { "epoch": 0.5724838411819021, "grad_norm": 5.334409713745117, "learning_rate": 9.270000000000001e-07, "loss": 3.1746, "step": 310 }, { "epoch": 0.5743305632502308, "grad_norm": 4.618269920349121, "learning_rate": 9.3e-07, "loss": 3.1904, "step": 311 }, { "epoch": 0.5761772853185596, "grad_norm": 2.947032928466797, "learning_rate": 9.33e-07, "loss": 3.1811, "step": 312 }, { "epoch": 0.5780240073868883, "grad_norm": 9.83170223236084, "learning_rate": 9.36e-07, "loss": 3.2034, "step": 313 }, { "epoch": 0.579870729455217, "grad_norm": 4.297440052032471, "learning_rate": 9.39e-07, "loss": 3.1925, "step": 314 }, { "epoch": 0.5817174515235457, "grad_norm": 3.8568122386932373, "learning_rate": 9.419999999999999e-07, "loss": 3.1584, "step": 315 }, { "epoch": 0.5835641735918744, "grad_norm": 3.8877062797546387, "learning_rate": 9.450000000000001e-07, "loss": 3.1015, "step": 316 }, { "epoch": 
0.5854108956602031, "grad_norm": 3.579789400100708, "learning_rate": 9.480000000000001e-07, "loss": 3.1743, "step": 317 }, { "epoch": 0.5872576177285319, "grad_norm": 3.8187687397003174, "learning_rate": 9.51e-07, "loss": 3.2222, "step": 318 }, { "epoch": 0.5891043397968606, "grad_norm": 5.050365447998047, "learning_rate": 9.54e-07, "loss": 3.1866, "step": 319 }, { "epoch": 0.5909510618651893, "grad_norm": 4.368477821350098, "learning_rate": 9.57e-07, "loss": 3.1362, "step": 320 }, { "epoch": 0.592797783933518, "grad_norm": 7.682777404785156, "learning_rate": 9.600000000000001e-07, "loss": 3.1577, "step": 321 }, { "epoch": 0.5946445060018467, "grad_norm": 4.716755390167236, "learning_rate": 9.63e-07, "loss": 3.1668, "step": 322 }, { "epoch": 0.5964912280701754, "grad_norm": 7.155860424041748, "learning_rate": 9.66e-07, "loss": 3.1626, "step": 323 }, { "epoch": 0.5983379501385041, "grad_norm": 3.2187845706939697, "learning_rate": 9.690000000000002e-07, "loss": 3.0961, "step": 324 }, { "epoch": 0.6001846722068329, "grad_norm": 7.184601783752441, "learning_rate": 9.72e-07, "loss": 3.1912, "step": 325 }, { "epoch": 0.6020313942751616, "grad_norm": 6.903328895568848, "learning_rate": 9.75e-07, "loss": 3.1594, "step": 326 }, { "epoch": 0.6038781163434903, "grad_norm": 6.267014980316162, "learning_rate": 9.78e-07, "loss": 3.1753, "step": 327 }, { "epoch": 0.605724838411819, "grad_norm": 6.188835620880127, "learning_rate": 9.81e-07, "loss": 3.1726, "step": 328 }, { "epoch": 0.6075715604801477, "grad_norm": 4.90877103805542, "learning_rate": 9.84e-07, "loss": 3.1101, "step": 329 }, { "epoch": 0.6094182825484764, "grad_norm": 5.561330318450928, "learning_rate": 9.87e-07, "loss": 3.1213, "step": 330 }, { "epoch": 0.6112650046168052, "grad_norm": 4.409107208251953, "learning_rate": 9.9e-07, "loss": 3.1224, "step": 331 }, { "epoch": 0.6131117266851339, "grad_norm": 2.835259437561035, "learning_rate": 9.929999999999999e-07, "loss": 3.1419, "step": 332 }, { "epoch": 
0.6149584487534626, "grad_norm": 5.65529203414917, "learning_rate": 9.96e-07, "loss": 3.1314, "step": 333 }, { "epoch": 0.6168051708217913, "grad_norm": 5.926425457000732, "learning_rate": 9.99e-07, "loss": 3.1238, "step": 334 }, { "epoch": 0.61865189289012, "grad_norm": 4.398102760314941, "learning_rate": 1.002e-06, "loss": 3.1094, "step": 335 }, { "epoch": 0.6204986149584487, "grad_norm": 5.749934673309326, "learning_rate": 1.0050000000000001e-06, "loss": 3.158, "step": 336 }, { "epoch": 0.6223453370267775, "grad_norm": 4.760705947875977, "learning_rate": 1.008e-06, "loss": 3.1159, "step": 337 }, { "epoch": 0.6241920590951062, "grad_norm": 7.050318241119385, "learning_rate": 1.0110000000000001e-06, "loss": 3.1001, "step": 338 }, { "epoch": 0.6260387811634349, "grad_norm": 7.832237720489502, "learning_rate": 1.014e-06, "loss": 3.1326, "step": 339 }, { "epoch": 0.6278855032317636, "grad_norm": 4.517617225646973, "learning_rate": 1.017e-06, "loss": 3.1487, "step": 340 }, { "epoch": 0.6297322253000923, "grad_norm": 6.685205936431885, "learning_rate": 1.0200000000000002e-06, "loss": 3.0753, "step": 341 }, { "epoch": 0.631578947368421, "grad_norm": 6.85542106628418, "learning_rate": 1.023e-06, "loss": 3.1213, "step": 342 }, { "epoch": 0.6334256694367497, "grad_norm": 6.824644565582275, "learning_rate": 1.026e-06, "loss": 3.0636, "step": 343 }, { "epoch": 0.6352723915050785, "grad_norm": 8.230368614196777, "learning_rate": 1.029e-06, "loss": 3.0875, "step": 344 }, { "epoch": 0.6371191135734072, "grad_norm": 3.8798701763153076, "learning_rate": 1.032e-06, "loss": 3.1108, "step": 345 }, { "epoch": 0.6389658356417359, "grad_norm": 4.092694282531738, "learning_rate": 1.035e-06, "loss": 3.1304, "step": 346 }, { "epoch": 0.6408125577100646, "grad_norm": 4.441507339477539, "learning_rate": 1.038e-06, "loss": 3.1214, "step": 347 }, { "epoch": 0.6426592797783933, "grad_norm": 8.2803955078125, "learning_rate": 1.041e-06, "loss": 3.0863, "step": 348 }, { "epoch": 
0.644506001846722, "grad_norm": 5.707834720611572, "learning_rate": 1.0439999999999999e-06, "loss": 3.1261, "step": 349 }, { "epoch": 0.6463527239150508, "grad_norm": 6.7514238357543945, "learning_rate": 1.047e-06, "loss": 3.0936, "step": 350 }, { "epoch": 0.6481994459833795, "grad_norm": 9.191317558288574, "learning_rate": 1.0500000000000001e-06, "loss": 3.1867, "step": 351 }, { "epoch": 0.6500461680517082, "grad_norm": 8.901002883911133, "learning_rate": 1.053e-06, "loss": 3.1831, "step": 352 }, { "epoch": 0.6518928901200369, "grad_norm": 5.3682169914245605, "learning_rate": 1.0560000000000001e-06, "loss": 3.115, "step": 353 }, { "epoch": 0.6537396121883656, "grad_norm": 3.2384531497955322, "learning_rate": 1.059e-06, "loss": 3.1332, "step": 354 }, { "epoch": 0.6555863342566943, "grad_norm": 2.113199472427368, "learning_rate": 1.062e-06, "loss": 3.1003, "step": 355 }, { "epoch": 0.6574330563250231, "grad_norm": 3.5084481239318848, "learning_rate": 1.065e-06, "loss": 3.0896, "step": 356 }, { "epoch": 0.6592797783933518, "grad_norm": 4.078393936157227, "learning_rate": 1.068e-06, "loss": 3.0751, "step": 357 }, { "epoch": 0.6611265004616805, "grad_norm": 6.067633628845215, "learning_rate": 1.0710000000000002e-06, "loss": 3.0848, "step": 358 }, { "epoch": 0.6629732225300092, "grad_norm": 5.45023775100708, "learning_rate": 1.074e-06, "loss": 3.0941, "step": 359 }, { "epoch": 0.6648199445983379, "grad_norm": 4.068233966827393, "learning_rate": 1.077e-06, "loss": 3.0869, "step": 360 }, { "epoch": 0.6666666666666666, "grad_norm": 3.762965202331543, "learning_rate": 1.08e-06, "loss": 3.1117, "step": 361 }, { "epoch": 0.6685133887349953, "grad_norm": 2.311380624771118, "learning_rate": 1.083e-06, "loss": 3.0524, "step": 362 }, { "epoch": 0.6703601108033241, "grad_norm": 4.134860038757324, "learning_rate": 1.086e-06, "loss": 3.0798, "step": 363 }, { "epoch": 0.6722068328716528, "grad_norm": 3.7544875144958496, "learning_rate": 1.089e-06, "loss": 3.1159, "step": 364 }, { 
"epoch": 0.6740535549399815, "grad_norm": 3.5989551544189453, "learning_rate": 1.092e-06, "loss": 3.0632, "step": 365 }, { "epoch": 0.6759002770083102, "grad_norm": 4.7507853507995605, "learning_rate": 1.0949999999999999e-06, "loss": 3.0727, "step": 366 }, { "epoch": 0.6777469990766389, "grad_norm": 7.671990871429443, "learning_rate": 1.0980000000000001e-06, "loss": 3.0851, "step": 367 }, { "epoch": 0.6795937211449676, "grad_norm": 6.9083380699157715, "learning_rate": 1.1010000000000001e-06, "loss": 3.0607, "step": 368 }, { "epoch": 0.6814404432132964, "grad_norm": 4.966256618499756, "learning_rate": 1.104e-06, "loss": 3.0911, "step": 369 }, { "epoch": 0.6832871652816251, "grad_norm": 12.021642684936523, "learning_rate": 1.1070000000000002e-06, "loss": 3.0722, "step": 370 }, { "epoch": 0.6851338873499538, "grad_norm": 3.737931728363037, "learning_rate": 1.11e-06, "loss": 3.0455, "step": 371 }, { "epoch": 0.6869806094182825, "grad_norm": 4.485469341278076, "learning_rate": 1.113e-06, "loss": 3.0536, "step": 372 }, { "epoch": 0.6888273314866112, "grad_norm": 6.518353462219238, "learning_rate": 1.116e-06, "loss": 3.049, "step": 373 }, { "epoch": 0.6906740535549399, "grad_norm": 4.276355743408203, "learning_rate": 1.119e-06, "loss": 3.0453, "step": 374 }, { "epoch": 0.6925207756232687, "grad_norm": 7.017218112945557, "learning_rate": 1.122e-06, "loss": 3.0717, "step": 375 }, { "epoch": 0.6943674976915974, "grad_norm": 4.840349197387695, "learning_rate": 1.125e-06, "loss": 3.0627, "step": 376 }, { "epoch": 0.6962142197599261, "grad_norm": 6.178562164306641, "learning_rate": 1.128e-06, "loss": 3.053, "step": 377 }, { "epoch": 0.6980609418282548, "grad_norm": 7.595609664916992, "learning_rate": 1.131e-06, "loss": 3.1017, "step": 378 }, { "epoch": 0.6999076638965835, "grad_norm": 4.348419666290283, "learning_rate": 1.134e-06, "loss": 3.0428, "step": 379 }, { "epoch": 0.7017543859649122, "grad_norm": 4.139669418334961, "learning_rate": 1.137e-06, "loss": 3.0727, "step": 380 
}, { "epoch": 0.703601108033241, "grad_norm": 5.153532981872559, "learning_rate": 1.14e-06, "loss": 3.0405, "step": 381 }, { "epoch": 0.7054478301015698, "grad_norm": 4.004812240600586, "learning_rate": 1.1430000000000001e-06, "loss": 3.0353, "step": 382 }, { "epoch": 0.7072945521698984, "grad_norm": 5.635801792144775, "learning_rate": 1.146e-06, "loss": 3.0672, "step": 383 }, { "epoch": 0.7091412742382271, "grad_norm": 3.6818764209747314, "learning_rate": 1.1490000000000001e-06, "loss": 3.0607, "step": 384 }, { "epoch": 0.7109879963065558, "grad_norm": 8.386887550354004, "learning_rate": 1.152e-06, "loss": 3.0002, "step": 385 }, { "epoch": 0.7128347183748845, "grad_norm": 7.178035259246826, "learning_rate": 1.155e-06, "loss": 3.0347, "step": 386 }, { "epoch": 0.7146814404432132, "grad_norm": 3.6302337646484375, "learning_rate": 1.1580000000000002e-06, "loss": 3.0274, "step": 387 }, { "epoch": 0.716528162511542, "grad_norm": 3.4200942516326904, "learning_rate": 1.161e-06, "loss": 3.0377, "step": 388 }, { "epoch": 0.7183748845798708, "grad_norm": 3.180461883544922, "learning_rate": 1.164e-06, "loss": 3.0216, "step": 389 }, { "epoch": 0.7202216066481995, "grad_norm": 3.9970366954803467, "learning_rate": 1.167e-06, "loss": 3.0542, "step": 390 }, { "epoch": 0.7220683287165282, "grad_norm": 8.964760780334473, "learning_rate": 1.17e-06, "loss": 3.0152, "step": 391 }, { "epoch": 0.7239150507848569, "grad_norm": 4.677817344665527, "learning_rate": 1.173e-06, "loss": 3.0372, "step": 392 }, { "epoch": 0.7257617728531855, "grad_norm": 5.287402153015137, "learning_rate": 1.176e-06, "loss": 2.9933, "step": 393 }, { "epoch": 0.7276084949215144, "grad_norm": 4.688521385192871, "learning_rate": 1.179e-06, "loss": 3.0533, "step": 394 }, { "epoch": 0.7294552169898431, "grad_norm": 5.032068729400635, "learning_rate": 1.1819999999999999e-06, "loss": 3.0147, "step": 395 }, { "epoch": 0.7313019390581718, "grad_norm": 6.2361626625061035, "learning_rate": 1.185e-06, "loss": 3.0879, 
"step": 396 }, { "epoch": 0.7331486611265005, "grad_norm": 7.5859270095825195, "learning_rate": 1.188e-06, "loss": 3.0227, "step": 397 }, { "epoch": 0.7349953831948292, "grad_norm": 6.370418071746826, "learning_rate": 1.191e-06, "loss": 2.9768, "step": 398 }, { "epoch": 0.7368421052631579, "grad_norm": 4.1537675857543945, "learning_rate": 1.1940000000000001e-06, "loss": 3.0001, "step": 399 }, { "epoch": 0.7386888273314867, "grad_norm": 4.340860843658447, "learning_rate": 1.197e-06, "loss": 3.0386, "step": 400 }, { "epoch": 0.7405355493998154, "grad_norm": 7.044209003448486, "learning_rate": 1.2000000000000002e-06, "loss": 3.187, "step": 401 }, { "epoch": 0.7423822714681441, "grad_norm": 9.477621078491211, "learning_rate": 1.203e-06, "loss": 3.0968, "step": 402 }, { "epoch": 0.7442289935364728, "grad_norm": 4.682133197784424, "learning_rate": 1.206e-06, "loss": 3.0578, "step": 403 }, { "epoch": 0.7460757156048015, "grad_norm": 3.7250590324401855, "learning_rate": 1.2090000000000002e-06, "loss": 3.0519, "step": 404 }, { "epoch": 0.7479224376731302, "grad_norm": 2.828423500061035, "learning_rate": 1.212e-06, "loss": 3.0602, "step": 405 }, { "epoch": 0.7497691597414589, "grad_norm": 4.87825870513916, "learning_rate": 1.215e-06, "loss": 3.0154, "step": 406 }, { "epoch": 0.7516158818097877, "grad_norm": 5.9660234451293945, "learning_rate": 1.218e-06, "loss": 3.0498, "step": 407 }, { "epoch": 0.7534626038781164, "grad_norm": 6.8960862159729, "learning_rate": 1.221e-06, "loss": 3.014, "step": 408 }, { "epoch": 0.7553093259464451, "grad_norm": 3.969679117202759, "learning_rate": 1.224e-06, "loss": 3.0294, "step": 409 }, { "epoch": 0.7571560480147738, "grad_norm": 4.408971786499023, "learning_rate": 1.227e-06, "loss": 2.9686, "step": 410 }, { "epoch": 0.7590027700831025, "grad_norm": 4.6965107917785645, "learning_rate": 1.23e-06, "loss": 3.0472, "step": 411 }, { "epoch": 0.7608494921514312, "grad_norm": 3.0788402557373047, "learning_rate": 1.2329999999999999e-06, "loss": 
2.9867, "step": 412 }, { "epoch": 0.76269621421976, "grad_norm": 5.122383117675781, "learning_rate": 1.2360000000000001e-06, "loss": 3.0021, "step": 413 }, { "epoch": 0.7645429362880887, "grad_norm": 3.920349359512329, "learning_rate": 1.2390000000000001e-06, "loss": 2.9976, "step": 414 }, { "epoch": 0.7663896583564174, "grad_norm": 8.338603019714355, "learning_rate": 1.242e-06, "loss": 2.9776, "step": 415 }, { "epoch": 0.7682363804247461, "grad_norm": 3.2974908351898193, "learning_rate": 1.2450000000000002e-06, "loss": 3.0257, "step": 416 }, { "epoch": 0.7700831024930748, "grad_norm": 4.498806953430176, "learning_rate": 1.248e-06, "loss": 2.9794, "step": 417 }, { "epoch": 0.7719298245614035, "grad_norm": 3.317884683609009, "learning_rate": 1.251e-06, "loss": 3.0031, "step": 418 }, { "epoch": 0.7737765466297323, "grad_norm": 5.48812198638916, "learning_rate": 1.254e-06, "loss": 3.0484, "step": 419 }, { "epoch": 0.775623268698061, "grad_norm": 5.9689154624938965, "learning_rate": 1.257e-06, "loss": 3.0451, "step": 420 }, { "epoch": 0.7774699907663897, "grad_norm": 4.714944839477539, "learning_rate": 1.26e-06, "loss": 3.0069, "step": 421 }, { "epoch": 0.7793167128347184, "grad_norm": 4.204563617706299, "learning_rate": 1.263e-06, "loss": 3.0274, "step": 422 }, { "epoch": 0.7811634349030471, "grad_norm": 4.774872779846191, "learning_rate": 1.266e-06, "loss": 2.9794, "step": 423 }, { "epoch": 0.7830101569713758, "grad_norm": 5.415470600128174, "learning_rate": 1.269e-06, "loss": 3.0081, "step": 424 }, { "epoch": 0.7848568790397045, "grad_norm": 4.5735554695129395, "learning_rate": 1.272e-06, "loss": 3.0158, "step": 425 }, { "epoch": 0.7867036011080333, "grad_norm": 4.952023506164551, "learning_rate": 1.275e-06, "loss": 3.0651, "step": 426 }, { "epoch": 0.788550323176362, "grad_norm": 4.689613342285156, "learning_rate": 1.278e-06, "loss": 2.9854, "step": 427 }, { "epoch": 0.7903970452446907, "grad_norm": 5.4375762939453125, "learning_rate": 1.281e-06, "loss": 2.9901, 
"step": 428 }, { "epoch": 0.7922437673130194, "grad_norm": 4.584848880767822, "learning_rate": 1.284e-06, "loss": 3.0129, "step": 429 }, { "epoch": 0.7940904893813481, "grad_norm": 5.961929798126221, "learning_rate": 1.2870000000000001e-06, "loss": 3.017, "step": 430 }, { "epoch": 0.7959372114496768, "grad_norm": 6.332926273345947, "learning_rate": 1.29e-06, "loss": 3.0211, "step": 431 }, { "epoch": 0.7977839335180056, "grad_norm": 4.257788181304932, "learning_rate": 1.293e-06, "loss": 3.0044, "step": 432 }, { "epoch": 0.7996306555863343, "grad_norm": 4.114194393157959, "learning_rate": 1.2960000000000002e-06, "loss": 2.9987, "step": 433 }, { "epoch": 0.801477377654663, "grad_norm": 12.284896850585938, "learning_rate": 1.299e-06, "loss": 2.9784, "step": 434 }, { "epoch": 0.8033240997229917, "grad_norm": 7.53204870223999, "learning_rate": 1.302e-06, "loss": 3.0016, "step": 435 }, { "epoch": 0.8051708217913204, "grad_norm": 3.8289263248443604, "learning_rate": 1.305e-06, "loss": 3.0036, "step": 436 }, { "epoch": 0.8070175438596491, "grad_norm": 6.599183559417725, "learning_rate": 1.308e-06, "loss": 2.9835, "step": 437 }, { "epoch": 0.8088642659279779, "grad_norm": 4.600238800048828, "learning_rate": 1.311e-06, "loss": 3.0261, "step": 438 }, { "epoch": 0.8107109879963066, "grad_norm": 4.442101955413818, "learning_rate": 1.314e-06, "loss": 2.9914, "step": 439 }, { "epoch": 0.8125577100646353, "grad_norm": 4.377070903778076, "learning_rate": 1.317e-06, "loss": 2.9763, "step": 440 }, { "epoch": 0.814404432132964, "grad_norm": 5.045103549957275, "learning_rate": 1.3199999999999999e-06, "loss": 2.9904, "step": 441 }, { "epoch": 0.8162511542012927, "grad_norm": 6.373979568481445, "learning_rate": 1.323e-06, "loss": 3.0087, "step": 442 }, { "epoch": 0.8180978762696214, "grad_norm": 4.301845073699951, "learning_rate": 1.326e-06, "loss": 2.9853, "step": 443 }, { "epoch": 0.8199445983379502, "grad_norm": 15.533903121948242, "learning_rate": 1.3290000000000001e-06, "loss": 
3.0014, "step": 444 }, { "epoch": 0.8217913204062789, "grad_norm": 4.522268772125244, "learning_rate": 1.3320000000000001e-06, "loss": 2.9524, "step": 445 }, { "epoch": 0.8236380424746076, "grad_norm": 4.2080559730529785, "learning_rate": 1.335e-06, "loss": 3.0163, "step": 446 }, { "epoch": 0.8254847645429363, "grad_norm": 4.199611663818359, "learning_rate": 1.3380000000000001e-06, "loss": 2.996, "step": 447 }, { "epoch": 0.827331486611265, "grad_norm": 4.514741897583008, "learning_rate": 1.341e-06, "loss": 2.9532, "step": 448 }, { "epoch": 0.8291782086795937, "grad_norm": 4.151061058044434, "learning_rate": 1.344e-06, "loss": 2.973, "step": 449 }, { "epoch": 0.8310249307479224, "grad_norm": 8.964682579040527, "learning_rate": 1.3470000000000002e-06, "loss": 2.998, "step": 450 }, { "epoch": 0.8328716528162512, "grad_norm": 7.244773864746094, "learning_rate": 1.35e-06, "loss": 3.1545, "step": 451 }, { "epoch": 0.8347183748845799, "grad_norm": 5.385263919830322, "learning_rate": 1.353e-06, "loss": 3.0276, "step": 452 }, { "epoch": 0.8365650969529086, "grad_norm": 2.983064889907837, "learning_rate": 1.356e-06, "loss": 2.9877, "step": 453 }, { "epoch": 0.8384118190212373, "grad_norm": 4.306495666503906, "learning_rate": 1.359e-06, "loss": 2.9808, "step": 454 }, { "epoch": 0.840258541089566, "grad_norm": 5.624070167541504, "learning_rate": 1.362e-06, "loss": 3.0441, "step": 455 }, { "epoch": 0.8421052631578947, "grad_norm": 5.876847267150879, "learning_rate": 1.365e-06, "loss": 3.014, "step": 456 }, { "epoch": 0.8439519852262235, "grad_norm": 5.433878421783447, "learning_rate": 1.368e-06, "loss": 3.0038, "step": 457 }, { "epoch": 0.8457987072945522, "grad_norm": 2.7806458473205566, "learning_rate": 1.3709999999999999e-06, "loss": 2.9971, "step": 458 }, { "epoch": 0.8476454293628809, "grad_norm": 3.286996603012085, "learning_rate": 1.374e-06, "loss": 2.9475, "step": 459 }, { "epoch": 0.8494921514312096, "grad_norm": 4.864804267883301, "learning_rate": 
1.3770000000000001e-06, "loss": 2.9524, "step": 460 }, { "epoch": 0.8513388734995383, "grad_norm": 5.1426615715026855, "learning_rate": 1.38e-06, "loss": 2.9754, "step": 461 }, { "epoch": 0.853185595567867, "grad_norm": 6.4956955909729, "learning_rate": 1.3830000000000001e-06, "loss": 2.9736, "step": 462 }, { "epoch": 0.8550323176361958, "grad_norm": 5.773088455200195, "learning_rate": 1.386e-06, "loss": 2.989, "step": 463 }, { "epoch": 0.8568790397045245, "grad_norm": 4.204294204711914, "learning_rate": 1.389e-06, "loss": 2.9945, "step": 464 }, { "epoch": 0.8587257617728532, "grad_norm": 4.098605632781982, "learning_rate": 1.392e-06, "loss": 2.9446, "step": 465 }, { "epoch": 0.8605724838411819, "grad_norm": 5.481067180633545, "learning_rate": 1.395e-06, "loss": 2.9319, "step": 466 }, { "epoch": 0.8624192059095106, "grad_norm": 6.614928245544434, "learning_rate": 1.3980000000000002e-06, "loss": 2.9674, "step": 467 }, { "epoch": 0.8642659279778393, "grad_norm": 7.987035751342773, "learning_rate": 1.401e-06, "loss": 2.9716, "step": 468 }, { "epoch": 0.866112650046168, "grad_norm": 5.396458625793457, "learning_rate": 1.404e-06, "loss": 2.9757, "step": 469 }, { "epoch": 0.8679593721144968, "grad_norm": 7.34463357925415, "learning_rate": 1.407e-06, "loss": 2.9759, "step": 470 }, { "epoch": 0.8698060941828255, "grad_norm": 4.453892707824707, "learning_rate": 1.41e-06, "loss": 2.9274, "step": 471 }, { "epoch": 0.8716528162511542, "grad_norm": 6.051670551300049, "learning_rate": 1.413e-06, "loss": 2.9369, "step": 472 }, { "epoch": 0.8734995383194829, "grad_norm": 5.094610214233398, "learning_rate": 1.416e-06, "loss": 2.9687, "step": 473 }, { "epoch": 0.8753462603878116, "grad_norm": 6.582310199737549, "learning_rate": 1.419e-06, "loss": 2.9616, "step": 474 }, { "epoch": 0.8771929824561403, "grad_norm": 6.141293525695801, "learning_rate": 1.422e-06, "loss": 2.9422, "step": 475 }, { "epoch": 0.8790397045244691, "grad_norm": 11.331812858581543, "learning_rate": 
1.4250000000000001e-06, "loss": 3.0109, "step": 476 }, { "epoch": 0.8808864265927978, "grad_norm": 3.9797582626342773, "learning_rate": 1.4280000000000001e-06, "loss": 2.9676, "step": 477 }, { "epoch": 0.8827331486611265, "grad_norm": 10.876532554626465, "learning_rate": 1.431e-06, "loss": 2.9751, "step": 478 }, { "epoch": 0.8845798707294552, "grad_norm": 6.678962707519531, "learning_rate": 1.4340000000000002e-06, "loss": 2.9888, "step": 479 }, { "epoch": 0.8864265927977839, "grad_norm": 5.212357044219971, "learning_rate": 1.437e-06, "loss": 2.9691, "step": 480 }, { "epoch": 0.8882733148661126, "grad_norm": 10.343843460083008, "learning_rate": 1.44e-06, "loss": 2.9692, "step": 481 }, { "epoch": 0.8901200369344414, "grad_norm": 5.704240798950195, "learning_rate": 1.443e-06, "loss": 2.9718, "step": 482 }, { "epoch": 0.8919667590027701, "grad_norm": 5.897530555725098, "learning_rate": 1.446e-06, "loss": 2.9566, "step": 483 }, { "epoch": 0.8938134810710988, "grad_norm": 6.475763320922852, "learning_rate": 1.449e-06, "loss": 2.9642, "step": 484 }, { "epoch": 0.8956602031394275, "grad_norm": 12.50463581085205, "learning_rate": 1.452e-06, "loss": 2.9554, "step": 485 }, { "epoch": 0.8975069252077562, "grad_norm": 9.410432815551758, "learning_rate": 1.455e-06, "loss": 2.9559, "step": 486 }, { "epoch": 0.8993536472760849, "grad_norm": 5.922210216522217, "learning_rate": 1.458e-06, "loss": 3.0047, "step": 487 }, { "epoch": 0.9012003693444137, "grad_norm": 4.550955295562744, "learning_rate": 1.461e-06, "loss": 2.9478, "step": 488 }, { "epoch": 0.9030470914127424, "grad_norm": 4.072491645812988, "learning_rate": 1.464e-06, "loss": 2.9659, "step": 489 }, { "epoch": 0.9048938134810711, "grad_norm": 5.787655353546143, "learning_rate": 1.467e-06, "loss": 2.9295, "step": 490 }, { "epoch": 0.9067405355493998, "grad_norm": 6.225001335144043, "learning_rate": 1.4700000000000001e-06, "loss": 2.9969, "step": 491 }, { "epoch": 0.9085872576177285, "grad_norm": 5.337482452392578, 
"learning_rate": 1.473e-06, "loss": 2.9654, "step": 492 }, { "epoch": 0.9104339796860572, "grad_norm": 5.667120456695557, "learning_rate": 1.4760000000000001e-06, "loss": 2.9851, "step": 493 }, { "epoch": 0.9122807017543859, "grad_norm": 5.20329475402832, "learning_rate": 1.479e-06, "loss": 2.9161, "step": 494 }, { "epoch": 0.9141274238227147, "grad_norm": 8.450933456420898, "learning_rate": 1.482e-06, "loss": 2.94, "step": 495 }, { "epoch": 0.9159741458910434, "grad_norm": 4.999294281005859, "learning_rate": 1.4850000000000002e-06, "loss": 2.9509, "step": 496 }, { "epoch": 0.9178208679593721, "grad_norm": 12.855695724487305, "learning_rate": 1.488e-06, "loss": 2.9833, "step": 497 }, { "epoch": 0.9196675900277008, "grad_norm": 3.0434823036193848, "learning_rate": 1.491e-06, "loss": 2.9338, "step": 498 }, { "epoch": 0.9215143120960295, "grad_norm": 7.846760272979736, "learning_rate": 1.494e-06, "loss": 3.0113, "step": 499 }, { "epoch": 0.9233610341643582, "grad_norm": 7.970994472503662, "learning_rate": 1.497e-06, "loss": 3.0131, "step": 500 }, { "epoch": 0.925207756232687, "grad_norm": 5.931797981262207, "learning_rate": 1.5e-06, "loss": 3.1065, "step": 501 }, { "epoch": 0.9270544783010157, "grad_norm": 5.381325721740723, "learning_rate": 1.503e-06, "loss": 2.9859, "step": 502 }, { "epoch": 0.9289012003693444, "grad_norm": 4.340394020080566, "learning_rate": 1.506e-06, "loss": 2.964, "step": 503 }, { "epoch": 0.9307479224376731, "grad_norm": 10.615586280822754, "learning_rate": 1.5089999999999999e-06, "loss": 2.9981, "step": 504 }, { "epoch": 0.9325946445060018, "grad_norm": 4.364864826202393, "learning_rate": 1.512e-06, "loss": 2.9368, "step": 505 }, { "epoch": 0.9344413665743305, "grad_norm": 4.9499359130859375, "learning_rate": 1.5150000000000001e-06, "loss": 2.9529, "step": 506 }, { "epoch": 0.9362880886426593, "grad_norm": 3.906594753265381, "learning_rate": 1.518e-06, "loss": 2.9482, "step": 507 }, { "epoch": 0.938134810710988, "grad_norm": 4.437647819519043, 
"learning_rate": 1.5210000000000001e-06, "loss": 2.9554, "step": 508 }, { "epoch": 0.9399815327793167, "grad_norm": 3.466791868209839, "learning_rate": 1.524e-06, "loss": 2.9123, "step": 509 }, { "epoch": 0.9418282548476454, "grad_norm": 4.363769054412842, "learning_rate": 1.5270000000000002e-06, "loss": 2.9242, "step": 510 }, { "epoch": 0.9436749769159741, "grad_norm": 5.1405863761901855, "learning_rate": 1.53e-06, "loss": 2.9258, "step": 511 }, { "epoch": 0.9455216989843028, "grad_norm": 4.913668632507324, "learning_rate": 1.533e-06, "loss": 2.9457, "step": 512 }, { "epoch": 0.9473684210526315, "grad_norm": 6.878814220428467, "learning_rate": 1.5360000000000002e-06, "loss": 2.9271, "step": 513 }, { "epoch": 0.9492151431209603, "grad_norm": 5.439577102661133, "learning_rate": 1.539e-06, "loss": 2.9196, "step": 514 }, { "epoch": 0.951061865189289, "grad_norm": 3.7509171962738037, "learning_rate": 1.542e-06, "loss": 2.9413, "step": 515 }, { "epoch": 0.9529085872576177, "grad_norm": 6.960578918457031, "learning_rate": 1.545e-06, "loss": 2.9481, "step": 516 }, { "epoch": 0.9547553093259464, "grad_norm": 7.0822930335998535, "learning_rate": 1.548e-06, "loss": 2.9559, "step": 517 }, { "epoch": 0.9566020313942751, "grad_norm": 6.05672550201416, "learning_rate": 1.551e-06, "loss": 2.9432, "step": 518 }, { "epoch": 0.9584487534626038, "grad_norm": 5.092657566070557, "learning_rate": 1.554e-06, "loss": 2.9519, "step": 519 }, { "epoch": 0.9602954755309326, "grad_norm": 8.910967826843262, "learning_rate": 1.557e-06, "loss": 2.9207, "step": 520 }, { "epoch": 0.9621421975992613, "grad_norm": 6.184726238250732, "learning_rate": 1.5599999999999999e-06, "loss": 2.9386, "step": 521 }, { "epoch": 0.96398891966759, "grad_norm": 7.006811141967773, "learning_rate": 1.5630000000000001e-06, "loss": 2.9322, "step": 522 }, { "epoch": 0.9658356417359187, "grad_norm": 6.83420467376709, "learning_rate": 1.5660000000000001e-06, "loss": 2.9562, "step": 523 }, { "epoch": 0.9676823638042474, 
"grad_norm": 7.393522262573242, "learning_rate": 1.569e-06, "loss": 2.9554, "step": 524 }, { "epoch": 0.9695290858725761, "grad_norm": 5.073296070098877, "learning_rate": 1.5720000000000002e-06, "loss": 2.9534, "step": 525 }, { "epoch": 0.971375807940905, "grad_norm": 6.644114017486572, "learning_rate": 1.575e-06, "loss": 2.9545, "step": 526 }, { "epoch": 0.9732225300092336, "grad_norm": 5.469161510467529, "learning_rate": 1.578e-06, "loss": 2.934, "step": 527 }, { "epoch": 0.9750692520775623, "grad_norm": 8.649992942810059, "learning_rate": 1.581e-06, "loss": 2.9644, "step": 528 }, { "epoch": 0.976915974145891, "grad_norm": 5.227014064788818, "learning_rate": 1.584e-06, "loss": 2.8967, "step": 529 }, { "epoch": 0.9787626962142197, "grad_norm": 6.67928409576416, "learning_rate": 1.5870000000000002e-06, "loss": 2.9356, "step": 530 }, { "epoch": 0.9806094182825484, "grad_norm": 6.659364700317383, "learning_rate": 1.59e-06, "loss": 2.939, "step": 531 }, { "epoch": 0.9824561403508771, "grad_norm": 9.442721366882324, "learning_rate": 1.593e-06, "loss": 2.9603, "step": 532 }, { "epoch": 0.984302862419206, "grad_norm": 6.252249240875244, "learning_rate": 1.596e-06, "loss": 2.9177, "step": 533 }, { "epoch": 0.9861495844875346, "grad_norm": 6.107838153839111, "learning_rate": 1.599e-06, "loss": 2.9185, "step": 534 }, { "epoch": 0.9879963065558633, "grad_norm": 8.275815963745117, "learning_rate": 1.602e-06, "loss": 2.9011, "step": 535 }, { "epoch": 0.989843028624192, "grad_norm": 5.931856155395508, "learning_rate": 1.605e-06, "loss": 2.9344, "step": 536 }, { "epoch": 0.9916897506925207, "grad_norm": 7.1943511962890625, "learning_rate": 1.608e-06, "loss": 2.8943, "step": 537 }, { "epoch": 0.9935364727608494, "grad_norm": 12.691488265991211, "learning_rate": 1.611e-06, "loss": 2.9317, "step": 538 }, { "epoch": 0.9953831948291783, "grad_norm": 4.4045233726501465, "learning_rate": 1.6140000000000001e-06, "loss": 2.8875, "step": 539 }, { "epoch": 0.997229916897507, "grad_norm": 
6.350393772125244, "learning_rate": 1.6170000000000001e-06, "loss": 2.9094, "step": 540 }, { "epoch": 0.9990766389658357, "grad_norm": 6.049736499786377, "learning_rate": 1.62e-06, "loss": 2.944, "step": 541 }, { "epoch": 1.0, "grad_norm": 4.06985330581665, "learning_rate": 1.6230000000000002e-06, "loss": 1.4558, "step": 542 }, { "epoch": 1.0018467220683287, "grad_norm": 12.09255599975586, "learning_rate": 1.626e-06, "loss": 3.0718, "step": 543 }, { "epoch": 1.0036934441366574, "grad_norm": 9.780098915100098, "learning_rate": 1.629e-06, "loss": 3.0656, "step": 544 }, { "epoch": 1.005540166204986, "grad_norm": 5.1886467933654785, "learning_rate": 1.632e-06, "loss": 3.0008, "step": 545 }, { "epoch": 1.0073868882733148, "grad_norm": 4.111461162567139, "learning_rate": 1.635e-06, "loss": 2.9359, "step": 546 }, { "epoch": 1.0092336103416435, "grad_norm": 5.650781154632568, "learning_rate": 1.638e-06, "loss": 2.9783, "step": 547 }, { "epoch": 1.0110803324099722, "grad_norm": 7.087272644042969, "learning_rate": 1.641e-06, "loss": 3.0137, "step": 548 }, { "epoch": 1.0129270544783011, "grad_norm": 6.621068477630615, "learning_rate": 1.644e-06, "loss": 2.9767, "step": 549 }, { "epoch": 1.0147737765466298, "grad_norm": 4.337469100952148, "learning_rate": 1.6469999999999999e-06, "loss": 2.948, "step": 550 }, { "epoch": 1.0166204986149585, "grad_norm": 3.3184478282928467, "learning_rate": 1.65e-06, "loss": 2.9006, "step": 551 }, { "epoch": 1.0184672206832872, "grad_norm": 3.802716016769409, "learning_rate": 1.653e-06, "loss": 2.9339, "step": 552 }, { "epoch": 1.020313942751616, "grad_norm": 5.8885297775268555, "learning_rate": 1.6560000000000001e-06, "loss": 2.9551, "step": 553 }, { "epoch": 1.0221606648199446, "grad_norm": 10.987329483032227, "learning_rate": 1.6590000000000001e-06, "loss": 2.9743, "step": 554 }, { "epoch": 1.0240073868882733, "grad_norm": 8.780963897705078, "learning_rate": 1.662e-06, "loss": 2.9448, "step": 555 }, { "epoch": 1.025854108956602, "grad_norm": 
10.191997528076172, "learning_rate": 1.6650000000000002e-06, "loss": 2.971, "step": 556 }, { "epoch": 1.0277008310249307, "grad_norm": 6.121068954467773, "learning_rate": 1.668e-06, "loss": 2.8992, "step": 557 }, { "epoch": 1.0295475530932594, "grad_norm": 5.866390228271484, "learning_rate": 1.671e-06, "loss": 2.915, "step": 558 }, { "epoch": 1.031394275161588, "grad_norm": 4.83311653137207, "learning_rate": 1.6740000000000002e-06, "loss": 2.9023, "step": 559 }, { "epoch": 1.0332409972299168, "grad_norm": 12.177200317382812, "learning_rate": 1.677e-06, "loss": 2.938, "step": 560 }, { "epoch": 1.0350877192982457, "grad_norm": 11.86186695098877, "learning_rate": 1.68e-06, "loss": 2.9344, "step": 561 }, { "epoch": 1.0369344413665744, "grad_norm": 8.23930835723877, "learning_rate": 1.683e-06, "loss": 2.9468, "step": 562 }, { "epoch": 1.0387811634349031, "grad_norm": 5.7900166511535645, "learning_rate": 1.686e-06, "loss": 2.8912, "step": 563 }, { "epoch": 1.0406278855032318, "grad_norm": 9.679719924926758, "learning_rate": 1.689e-06, "loss": 2.8847, "step": 564 }, { "epoch": 1.0424746075715605, "grad_norm": 4.417483806610107, "learning_rate": 1.692e-06, "loss": 2.9201, "step": 565 }, { "epoch": 1.0443213296398892, "grad_norm": 11.755255699157715, "learning_rate": 1.695e-06, "loss": 2.9109, "step": 566 }, { "epoch": 1.046168051708218, "grad_norm": 6.306344985961914, "learning_rate": 1.6979999999999999e-06, "loss": 2.8868, "step": 567 }, { "epoch": 1.0480147737765466, "grad_norm": 7.374333381652832, "learning_rate": 1.701e-06, "loss": 2.9801, "step": 568 }, { "epoch": 1.0498614958448753, "grad_norm": 9.103034973144531, "learning_rate": 1.7040000000000001e-06, "loss": 2.9041, "step": 569 }, { "epoch": 1.051708217913204, "grad_norm": 6.525115013122559, "learning_rate": 1.707e-06, "loss": 2.9199, "step": 570 }, { "epoch": 1.0535549399815327, "grad_norm": 5.839273452758789, "learning_rate": 1.7100000000000001e-06, "loss": 2.9292, "step": 571 }, { "epoch": 1.0554016620498614, 
"grad_norm": 7.118470668792725, "learning_rate": 1.713e-06, "loss": 2.92, "step": 572 }, { "epoch": 1.05724838411819, "grad_norm": 7.868495464324951, "learning_rate": 1.7160000000000002e-06, "loss": 2.8916, "step": 573 }, { "epoch": 1.059095106186519, "grad_norm": 14.98963737487793, "learning_rate": 1.719e-06, "loss": 2.9395, "step": 574 }, { "epoch": 1.0609418282548477, "grad_norm": 6.7287774085998535, "learning_rate": 1.722e-06, "loss": 2.9224, "step": 575 }, { "epoch": 1.0627885503231764, "grad_norm": 6.143113613128662, "learning_rate": 1.7250000000000002e-06, "loss": 2.932, "step": 576 }, { "epoch": 1.0646352723915051, "grad_norm": 5.883518695831299, "learning_rate": 1.728e-06, "loss": 2.8912, "step": 577 }, { "epoch": 1.0664819944598338, "grad_norm": 6.572492599487305, "learning_rate": 1.731e-06, "loss": 2.9304, "step": 578 }, { "epoch": 1.0683287165281625, "grad_norm": 8.820541381835938, "learning_rate": 1.734e-06, "loss": 2.8974, "step": 579 }, { "epoch": 1.0701754385964912, "grad_norm": 8.882281303405762, "learning_rate": 1.737e-06, "loss": 2.9745, "step": 580 }, { "epoch": 1.07202216066482, "grad_norm": 14.256232261657715, "learning_rate": 1.74e-06, "loss": 2.9035, "step": 581 }, { "epoch": 1.0738688827331486, "grad_norm": 7.182823181152344, "learning_rate": 1.743e-06, "loss": 2.8644, "step": 582 }, { "epoch": 1.0757156048014773, "grad_norm": 7.0797600746154785, "learning_rate": 1.746e-06, "loss": 2.8771, "step": 583 }, { "epoch": 1.077562326869806, "grad_norm": 11.359004974365234, "learning_rate": 1.749e-06, "loss": 2.9435, "step": 584 }, { "epoch": 1.0794090489381347, "grad_norm": 21.062517166137695, "learning_rate": 1.7520000000000001e-06, "loss": 2.9306, "step": 585 }, { "epoch": 1.0812557710064636, "grad_norm": 10.017592430114746, "learning_rate": 1.7550000000000001e-06, "loss": 2.9346, "step": 586 }, { "epoch": 1.0831024930747923, "grad_norm": NaN, "learning_rate": 1.7550000000000001e-06, "loss": 2.8635, "step": 587 }, { "epoch": 1.084949215143121, 
"grad_norm": 8.263057708740234, "learning_rate": 1.758e-06, "loss": 2.9339, "step": 588 }, { "epoch": 1.0867959372114497, "grad_norm": 6.318991661071777, "learning_rate": 1.7610000000000002e-06, "loss": 2.8913, "step": 589 }, { "epoch": 1.0886426592797784, "grad_norm": 5.331015110015869, "learning_rate": 1.764e-06, "loss": 2.8813, "step": 590 }, { "epoch": 1.0904893813481071, "grad_norm": 7.7847089767456055, "learning_rate": 1.767e-06, "loss": 2.9171, "step": 591 }, { "epoch": 1.0923361034164358, "grad_norm": 4.977515697479248, "learning_rate": 1.77e-06, "loss": 2.8977, "step": 592 }, { "epoch": 1.0941828254847645, "grad_norm": 5.907567024230957, "learning_rate": 1.773e-06, "loss": 3.0373, "step": 593 }, { "epoch": 1.0960295475530932, "grad_norm": 4.246371269226074, "learning_rate": 1.776e-06, "loss": 2.9802, "step": 594 }, { "epoch": 1.097876269621422, "grad_norm": 3.133854389190674, "learning_rate": 1.779e-06, "loss": 2.9497, "step": 595 }, { "epoch": 1.0997229916897506, "grad_norm": 3.6142194271087646, "learning_rate": 1.782e-06, "loss": 2.9732, "step": 596 }, { "epoch": 1.1015697137580793, "grad_norm": 3.5081558227539062, "learning_rate": 1.785e-06, "loss": 2.934, "step": 597 }, { "epoch": 1.103416435826408, "grad_norm": 5.0536956787109375, "learning_rate": 1.788e-06, "loss": 2.9035, "step": 598 }, { "epoch": 1.1052631578947367, "grad_norm": 6.321654796600342, "learning_rate": 1.791e-06, "loss": 2.9239, "step": 599 }, { "epoch": 1.1071098799630656, "grad_norm": 6.200789451599121, "learning_rate": 1.794e-06, "loss": 2.9125, "step": 600 }, { "epoch": 1.1089566020313943, "grad_norm": 12.870471954345703, "learning_rate": 1.7970000000000001e-06, "loss": 2.9131, "step": 601 }, { "epoch": 1.110803324099723, "grad_norm": 4.161349296569824, "learning_rate": 1.8e-06, "loss": 2.9016, "step": 602 }, { "epoch": 1.1126500461680517, "grad_norm": 7.384085655212402, "learning_rate": 1.8030000000000001e-06, "loss": 2.9012, "step": 603 }, { "epoch": 1.1144967682363804, 
"grad_norm": 5.1679840087890625, "learning_rate": 1.806e-06, "loss": 2.9031, "step": 604 }, { "epoch": 1.1163434903047091, "grad_norm": 5.276459693908691, "learning_rate": 1.809e-06, "loss": 2.8899, "step": 605 }, { "epoch": 1.1181902123730378, "grad_norm": 5.313704967498779, "learning_rate": 1.8120000000000002e-06, "loss": 2.8708, "step": 606 }, { "epoch": 1.1200369344413665, "grad_norm": 5.521195888519287, "learning_rate": 1.815e-06, "loss": 2.9006, "step": 607 }, { "epoch": 1.1218836565096952, "grad_norm": 8.239352226257324, "learning_rate": 1.818e-06, "loss": 2.905, "step": 608 }, { "epoch": 1.123730378578024, "grad_norm": 5.124113082885742, "learning_rate": 1.821e-06, "loss": 2.8871, "step": 609 }, { "epoch": 1.1255771006463526, "grad_norm": 4.876293659210205, "learning_rate": 1.824e-06, "loss": 2.8934, "step": 610 }, { "epoch": 1.1274238227146816, "grad_norm": 10.052160263061523, "learning_rate": 1.827e-06, "loss": 2.9043, "step": 611 }, { "epoch": 1.1292705447830103, "grad_norm": 8.201723098754883, "learning_rate": 1.83e-06, "loss": 2.9086, "step": 612 }, { "epoch": 1.131117266851339, "grad_norm": 4.239204406738281, "learning_rate": 1.833e-06, "loss": 2.8758, "step": 613 }, { "epoch": 1.1329639889196677, "grad_norm": 5.192922592163086, "learning_rate": 1.8359999999999999e-06, "loss": 2.8823, "step": 614 }, { "epoch": 1.1348107109879964, "grad_norm": 8.6290864944458, "learning_rate": 1.839e-06, "loss": 2.8688, "step": 615 }, { "epoch": 1.136657433056325, "grad_norm": 6.887111663818359, "learning_rate": 1.8420000000000001e-06, "loss": 2.8978, "step": 616 }, { "epoch": 1.1385041551246537, "grad_norm": 4.787206172943115, "learning_rate": 1.8450000000000001e-06, "loss": 2.8983, "step": 617 }, { "epoch": 1.1403508771929824, "grad_norm": 10.718205451965332, "learning_rate": 1.8480000000000001e-06, "loss": 2.9018, "step": 618 }, { "epoch": 1.1421975992613111, "grad_norm": 6.42782735824585, "learning_rate": 1.851e-06, "loss": 2.8909, "step": 619 }, { "epoch": 
1.1440443213296398, "grad_norm": 7.556795120239258, "learning_rate": 1.8540000000000002e-06, "loss": 2.9127, "step": 620 }, { "epoch": 1.1458910433979685, "grad_norm": 9.342985153198242, "learning_rate": 1.857e-06, "loss": 2.885, "step": 621 }, { "epoch": 1.1477377654662972, "grad_norm": 9.603569984436035, "learning_rate": 1.86e-06, "loss": 2.8918, "step": 622 }, { "epoch": 1.149584487534626, "grad_norm": 15.569653511047363, "learning_rate": 1.8630000000000002e-06, "loss": 2.8943, "step": 623 }, { "epoch": 1.1514312096029546, "grad_norm": 8.585500717163086, "learning_rate": 1.866e-06, "loss": 2.889, "step": 624 }, { "epoch": 1.1532779316712836, "grad_norm": 4.9472150802612305, "learning_rate": 1.869e-06, "loss": 2.8983, "step": 625 }, { "epoch": 1.1551246537396123, "grad_norm": 14.335213661193848, "learning_rate": 1.872e-06, "loss": 2.8734, "step": 626 }, { "epoch": 1.156971375807941, "grad_norm": 10.756952285766602, "learning_rate": 1.875e-06, "loss": 2.9226, "step": 627 }, { "epoch": 1.1588180978762697, "grad_norm": 5.745858192443848, "learning_rate": 1.878e-06, "loss": 2.8554, "step": 628 }, { "epoch": 1.1606648199445984, "grad_norm": 8.257650375366211, "learning_rate": 1.8810000000000003e-06, "loss": 2.9119, "step": 629 }, { "epoch": 1.162511542012927, "grad_norm": 13.96142292022705, "learning_rate": 1.8839999999999999e-06, "loss": 2.8432, "step": 630 }, { "epoch": 1.1643582640812558, "grad_norm": 8.9722900390625, "learning_rate": 1.8869999999999999e-06, "loss": 2.9012, "step": 631 }, { "epoch": 1.1662049861495845, "grad_norm": 5.260127544403076, "learning_rate": 1.8900000000000001e-06, "loss": 2.852, "step": 632 }, { "epoch": 1.1680517082179132, "grad_norm": 12.734224319458008, "learning_rate": 1.8930000000000001e-06, "loss": 2.8762, "step": 633 }, { "epoch": 1.1698984302862419, "grad_norm": 10.812736511230469, "learning_rate": 1.8960000000000001e-06, "loss": 2.8813, "step": 634 }, { "epoch": 1.1717451523545706, "grad_norm": 7.148223876953125, "learning_rate": 
1.899e-06, "loss": 2.8701, "step": 635 }, { "epoch": 1.1735918744228995, "grad_norm": 10.687859535217285, "learning_rate": 1.902e-06, "loss": 2.8745, "step": 636 }, { "epoch": 1.1754385964912282, "grad_norm": 14.53646469116211, "learning_rate": 1.905e-06, "loss": 2.881, "step": 637 }, { "epoch": 1.1772853185595569, "grad_norm": 9.041234970092773, "learning_rate": 1.908e-06, "loss": 2.8867, "step": 638 }, { "epoch": 1.1791320406278856, "grad_norm": 6.838634967803955, "learning_rate": 1.9110000000000004e-06, "loss": 2.8243, "step": 639 }, { "epoch": 1.1809787626962143, "grad_norm": 6.505585193634033, "learning_rate": 1.914e-06, "loss": 2.8369, "step": 640 }, { "epoch": 1.182825484764543, "grad_norm": 4.176347732543945, "learning_rate": 1.917e-06, "loss": 2.8408, "step": 641 }, { "epoch": 1.1846722068328717, "grad_norm": 9.723844528198242, "learning_rate": 1.9200000000000003e-06, "loss": 2.8581, "step": 642 }, { "epoch": 1.1865189289012004, "grad_norm": 3.1684012413024902, "learning_rate": 1.923e-06, "loss": 2.9751, "step": 643 }, { "epoch": 1.188365650969529, "grad_norm": 3.2966151237487793, "learning_rate": 1.926e-06, "loss": 2.9243, "step": 644 }, { "epoch": 1.1902123730378578, "grad_norm": 3.4464704990386963, "learning_rate": 1.929e-06, "loss": 2.9082, "step": 645 }, { "epoch": 1.1920590951061865, "grad_norm": 2.4331507682800293, "learning_rate": 1.932e-06, "loss": 2.905, "step": 646 }, { "epoch": 1.1939058171745152, "grad_norm": 4.029451370239258, "learning_rate": 1.935e-06, "loss": 2.9147, "step": 647 }, { "epoch": 1.1957525392428439, "grad_norm": 15.655887603759766, "learning_rate": 1.9380000000000003e-06, "loss": 2.8587, "step": 648 }, { "epoch": 1.1975992613111726, "grad_norm": 4.774689197540283, "learning_rate": 1.9409999999999997e-06, "loss": 2.8423, "step": 649 }, { "epoch": 1.1994459833795015, "grad_norm": 3.9174821376800537, "learning_rate": 1.944e-06, "loss": 2.8417, "step": 650 }, { "epoch": 1.2012927054478302, "grad_norm": 2.8984720706939697, 
"learning_rate": 1.947e-06, "loss": 2.8605, "step": 651 }, { "epoch": 1.2031394275161589, "grad_norm": 5.679083824157715, "learning_rate": 1.95e-06, "loss": 2.8173, "step": 652 }, { "epoch": 1.2049861495844876, "grad_norm": 3.6044998168945312, "learning_rate": 1.953e-06, "loss": 2.8322, "step": 653 }, { "epoch": 1.2068328716528163, "grad_norm": 5.281006813049316, "learning_rate": 1.956e-06, "loss": 2.8368, "step": 654 }, { "epoch": 1.208679593721145, "grad_norm": 4.353389263153076, "learning_rate": 1.959e-06, "loss": 2.822, "step": 655 }, { "epoch": 1.2105263157894737, "grad_norm": 5.798543930053711, "learning_rate": 1.962e-06, "loss": 2.8404, "step": 656 }, { "epoch": 1.2123730378578024, "grad_norm": 5.485929489135742, "learning_rate": 1.9650000000000002e-06, "loss": 2.8379, "step": 657 }, { "epoch": 1.214219759926131, "grad_norm": 4.834137916564941, "learning_rate": 1.968e-06, "loss": 2.8366, "step": 658 }, { "epoch": 1.2160664819944598, "grad_norm": 5.665460586547852, "learning_rate": 1.971e-06, "loss": 2.8385, "step": 659 }, { "epoch": 1.2179132040627885, "grad_norm": 7.043797492980957, "learning_rate": 1.974e-06, "loss": 2.8221, "step": 660 }, { "epoch": 1.2197599261311174, "grad_norm": 5.808165073394775, "learning_rate": 1.977e-06, "loss": 2.8227, "step": 661 }, { "epoch": 1.221606648199446, "grad_norm": 21.553096771240234, "learning_rate": 1.98e-06, "loss": 2.8435, "step": 662 }, { "epoch": 1.2234533702677748, "grad_norm": 4.092555046081543, "learning_rate": 1.9830000000000003e-06, "loss": 2.8029, "step": 663 }, { "epoch": 1.2253000923361035, "grad_norm": 6.962827682495117, "learning_rate": 1.9859999999999997e-06, "loss": 2.8122, "step": 664 }, { "epoch": 1.2271468144044322, "grad_norm": 4.3983659744262695, "learning_rate": 1.989e-06, "loss": 2.833, "step": 665 }, { "epoch": 1.2289935364727609, "grad_norm": 4.870649814605713, "learning_rate": 1.992e-06, "loss": 2.8313, "step": 666 }, { "epoch": 1.2308402585410896, "grad_norm": 8.451695442199707, 
"learning_rate": 1.995e-06, "loss": 2.8211, "step": 667 }, { "epoch": 1.2326869806094183, "grad_norm": 9.19526481628418, "learning_rate": 1.998e-06, "loss": 2.8427, "step": 668 }, { "epoch": 1.234533702677747, "grad_norm": 7.379989147186279, "learning_rate": 2.001e-06, "loss": 2.8373, "step": 669 }, { "epoch": 1.2363804247460757, "grad_norm": 10.438669204711914, "learning_rate": 2.004e-06, "loss": 2.8293, "step": 670 }, { "epoch": 1.2382271468144044, "grad_norm": 3.9440643787384033, "learning_rate": 2.007e-06, "loss": 2.8231, "step": 671 }, { "epoch": 1.240073868882733, "grad_norm": 7.063518524169922, "learning_rate": 2.0100000000000002e-06, "loss": 2.846, "step": 672 }, { "epoch": 1.2419205909510618, "grad_norm": 5.866770267486572, "learning_rate": 2.0130000000000005e-06, "loss": 2.8227, "step": 673 }, { "epoch": 1.2437673130193905, "grad_norm": 4.861609935760498, "learning_rate": 2.016e-06, "loss": 2.8078, "step": 674 }, { "epoch": 1.2456140350877192, "grad_norm": 4.8793463706970215, "learning_rate": 2.019e-06, "loss": 2.8705, "step": 675 }, { "epoch": 1.247460757156048, "grad_norm": 5.221220016479492, "learning_rate": 2.0220000000000003e-06, "loss": 2.8138, "step": 676 }, { "epoch": 1.2493074792243768, "grad_norm": 6.798542022705078, "learning_rate": 2.025e-06, "loss": 2.795, "step": 677 }, { "epoch": 1.2511542012927055, "grad_norm": 6.260668754577637, "learning_rate": 2.028e-06, "loss": 2.8433, "step": 678 }, { "epoch": 1.2530009233610342, "grad_norm": 4.344798564910889, "learning_rate": 2.031e-06, "loss": 2.7893, "step": 679 }, { "epoch": 1.254847645429363, "grad_norm": 3.513256549835205, "learning_rate": 2.034e-06, "loss": 2.8129, "step": 680 }, { "epoch": 1.2566943674976916, "grad_norm": 5.4455718994140625, "learning_rate": 2.037e-06, "loss": 2.7959, "step": 681 }, { "epoch": 1.2585410895660203, "grad_norm": 9.517064094543457, "learning_rate": 2.0400000000000004e-06, "loss": 2.8195, "step": 682 }, { "epoch": 1.260387811634349, "grad_norm": 6.500871658325195, 
"learning_rate": 2.0429999999999998e-06, "loss": 2.8049, "step": 683 }, { "epoch": 1.2622345337026777, "grad_norm": 4.144886016845703, "learning_rate": 2.046e-06, "loss": 2.7864, "step": 684 }, { "epoch": 1.2640812557710064, "grad_norm": 5.43059778213501, "learning_rate": 2.049e-06, "loss": 2.7869, "step": 685 }, { "epoch": 1.2659279778393353, "grad_norm": 7.421367168426514, "learning_rate": 2.052e-06, "loss": 2.8101, "step": 686 }, { "epoch": 1.267774699907664, "grad_norm": 5.0669941902160645, "learning_rate": 2.0550000000000002e-06, "loss": 2.7979, "step": 687 }, { "epoch": 1.2696214219759927, "grad_norm": 5.999461650848389, "learning_rate": 2.058e-06, "loss": 2.7967, "step": 688 }, { "epoch": 1.2714681440443214, "grad_norm": 5.63998556137085, "learning_rate": 2.061e-06, "loss": 2.8188, "step": 689 }, { "epoch": 1.27331486611265, "grad_norm": 10.207146644592285, "learning_rate": 2.064e-06, "loss": 2.819, "step": 690 }, { "epoch": 1.2751615881809788, "grad_norm": 2.9560375213623047, "learning_rate": 2.0670000000000003e-06, "loss": 2.7793, "step": 691 }, { "epoch": 1.2770083102493075, "grad_norm": 3.9872069358825684, "learning_rate": 2.07e-06, "loss": 2.7723, "step": 692 }, { "epoch": 1.2788550323176362, "grad_norm": 9.619598388671875, "learning_rate": 2.073e-06, "loss": 2.927, "step": 693 }, { "epoch": 1.280701754385965, "grad_norm": 10.317651748657227, "learning_rate": 2.076e-06, "loss": 2.8825, "step": 694 }, { "epoch": 1.2825484764542936, "grad_norm": 4.086780071258545, "learning_rate": 2.079e-06, "loss": 2.8653, "step": 695 }, { "epoch": 1.2843951985226223, "grad_norm": 6.283045768737793, "learning_rate": 2.082e-06, "loss": 2.8542, "step": 696 }, { "epoch": 1.286241920590951, "grad_norm": 9.914119720458984, "learning_rate": 2.0850000000000004e-06, "loss": 2.8521, "step": 697 }, { "epoch": 1.2880886426592797, "grad_norm": 9.009072303771973, "learning_rate": 2.0879999999999997e-06, "loss": 2.861, "step": 698 }, { "epoch": 1.2899353647276084, "grad_norm": 
5.214295864105225, "learning_rate": 2.091e-06, "loss": 2.8336, "step": 699 }, { "epoch": 1.291782086795937, "grad_norm": 4.523592472076416, "learning_rate": 2.094e-06, "loss": 2.7949, "step": 700 }, { "epoch": 1.293628808864266, "grad_norm": 5.53308629989624, "learning_rate": 2.097e-06, "loss": 2.8029, "step": 701 }, { "epoch": 1.2954755309325947, "grad_norm": 5.987646579742432, "learning_rate": 2.1000000000000002e-06, "loss": 2.7991, "step": 702 }, { "epoch": 1.2973222530009234, "grad_norm": 6.248629570007324, "learning_rate": 2.103e-06, "loss": 2.7892, "step": 703 }, { "epoch": 1.299168975069252, "grad_norm": 2.0059845447540283, "learning_rate": 2.106e-06, "loss": 2.7865, "step": 704 }, { "epoch": 1.3010156971375808, "grad_norm": 4.6807098388671875, "learning_rate": 2.109e-06, "loss": 2.7815, "step": 705 }, { "epoch": 1.3028624192059095, "grad_norm": 3.9518628120422363, "learning_rate": 2.1120000000000003e-06, "loss": 2.8086, "step": 706 }, { "epoch": 1.3047091412742382, "grad_norm": 4.064188003540039, "learning_rate": 2.1149999999999997e-06, "loss": 2.7868, "step": 707 }, { "epoch": 1.306555863342567, "grad_norm": 3.1247639656066895, "learning_rate": 2.118e-06, "loss": 2.7812, "step": 708 }, { "epoch": 1.3084025854108956, "grad_norm": 4.630458831787109, "learning_rate": 2.121e-06, "loss": 2.7804, "step": 709 }, { "epoch": 1.3102493074792243, "grad_norm": 22.884502410888672, "learning_rate": 2.124e-06, "loss": 2.748, "step": 710 }, { "epoch": 1.3120960295475532, "grad_norm": 4.427460670471191, "learning_rate": 2.127e-06, "loss": 2.8104, "step": 711 }, { "epoch": 1.313942751615882, "grad_norm": 4.474238872528076, "learning_rate": 2.13e-06, "loss": 2.7915, "step": 712 }, { "epoch": 1.3157894736842106, "grad_norm": 2.491396903991699, "learning_rate": 2.133e-06, "loss": 2.7305, "step": 713 }, { "epoch": 1.3176361957525393, "grad_norm": 4.121035099029541, "learning_rate": 2.136e-06, "loss": 2.7733, "step": 714 }, { "epoch": 1.319482917820868, "grad_norm": 
4.982003211975098, "learning_rate": 2.139e-06, "loss": 2.7607, "step": 715 }, { "epoch": 1.3213296398891967, "grad_norm": 2.3569273948669434, "learning_rate": 2.1420000000000004e-06, "loss": 2.7735, "step": 716 }, { "epoch": 1.3231763619575254, "grad_norm": 4.553219318389893, "learning_rate": 2.145e-06, "loss": 2.7653, "step": 717 }, { "epoch": 1.3250230840258541, "grad_norm": 3.5110154151916504, "learning_rate": 2.148e-06, "loss": 2.7853, "step": 718 }, { "epoch": 1.3268698060941828, "grad_norm": 3.3325610160827637, "learning_rate": 2.1510000000000002e-06, "loss": 2.7661, "step": 719 }, { "epoch": 1.3287165281625115, "grad_norm": 5.185281753540039, "learning_rate": 2.154e-06, "loss": 2.7715, "step": 720 }, { "epoch": 1.3305632502308402, "grad_norm": 5.594532489776611, "learning_rate": 2.1570000000000003e-06, "loss": 2.7784, "step": 721 }, { "epoch": 1.332409972299169, "grad_norm": 5.433917999267578, "learning_rate": 2.16e-06, "loss": 2.7305, "step": 722 }, { "epoch": 1.3342566943674976, "grad_norm": 4.988367080688477, "learning_rate": 2.163e-06, "loss": 2.7773, "step": 723 }, { "epoch": 1.3361034164358263, "grad_norm": 3.9520010948181152, "learning_rate": 2.166e-06, "loss": 2.7538, "step": 724 }, { "epoch": 1.337950138504155, "grad_norm": 6.544180393218994, "learning_rate": 2.1690000000000003e-06, "loss": 2.7799, "step": 725 }, { "epoch": 1.3397968605724837, "grad_norm": 7.745989799499512, "learning_rate": 2.172e-06, "loss": 2.7307, "step": 726 }, { "epoch": 1.3416435826408126, "grad_norm": 4.38186502456665, "learning_rate": 2.175e-06, "loss": 2.8229, "step": 727 }, { "epoch": 1.3434903047091413, "grad_norm": 3.6738603115081787, "learning_rate": 2.178e-06, "loss": 2.7445, "step": 728 }, { "epoch": 1.34533702677747, "grad_norm": 4.38754940032959, "learning_rate": 2.181e-06, "loss": 2.7487, "step": 729 }, { "epoch": 1.3471837488457987, "grad_norm": 3.183619260787964, "learning_rate": 2.184e-06, "loss": 2.75, "step": 730 }, { "epoch": 1.3490304709141274, "grad_norm": 
6.966119289398193, "learning_rate": 2.1870000000000004e-06, "loss": 2.765, "step": 731 }, { "epoch": 1.3508771929824561, "grad_norm": 4.263189792633057, "learning_rate": 2.1899999999999998e-06, "loss": 2.7622, "step": 732 }, { "epoch": 1.3527239150507848, "grad_norm": 2.978217124938965, "learning_rate": 2.193e-06, "loss": 2.7429, "step": 733 }, { "epoch": 1.3545706371191135, "grad_norm": 4.708694934844971, "learning_rate": 2.1960000000000002e-06, "loss": 2.7563, "step": 734 }, { "epoch": 1.3564173591874422, "grad_norm": 4.529090881347656, "learning_rate": 2.199e-06, "loss": 2.7436, "step": 735 }, { "epoch": 1.3582640812557711, "grad_norm": 4.785562992095947, "learning_rate": 2.2020000000000003e-06, "loss": 2.7406, "step": 736 }, { "epoch": 1.3601108033240998, "grad_norm": 10.316986083984375, "learning_rate": 2.205e-06, "loss": 2.7357, "step": 737 }, { "epoch": 1.3619575253924285, "grad_norm": 4.90836238861084, "learning_rate": 2.208e-06, "loss": 2.7566, "step": 738 }, { "epoch": 1.3638042474607572, "grad_norm": 8.626869201660156, "learning_rate": 2.211e-06, "loss": 2.7523, "step": 739 }, { "epoch": 1.365650969529086, "grad_norm": 11.956260681152344, "learning_rate": 2.2140000000000003e-06, "loss": 2.7543, "step": 740 }, { "epoch": 1.3674976915974146, "grad_norm": 5.312864303588867, "learning_rate": 2.2169999999999997e-06, "loss": 2.7477, "step": 741 }, { "epoch": 1.3693444136657433, "grad_norm": 4.989658355712891, "learning_rate": 2.22e-06, "loss": 2.7476, "step": 742 }, { "epoch": 1.371191135734072, "grad_norm": 6.6182637214660645, "learning_rate": 2.223e-06, "loss": 2.8991, "step": 743 }, { "epoch": 1.3730378578024007, "grad_norm": 6.964872360229492, "learning_rate": 2.226e-06, "loss": 2.8556, "step": 744 }, { "epoch": 1.3748845798707294, "grad_norm": 3.2420098781585693, "learning_rate": 2.229e-06, "loss": 2.7974, "step": 745 }, { "epoch": 1.3767313019390581, "grad_norm": 9.735184669494629, "learning_rate": 2.232e-06, "loss": 2.8501, "step": 746 }, { "epoch": 
1.3785780240073868, "grad_norm": 10.228830337524414, "learning_rate": 2.2349999999999998e-06, "loss": 2.8392, "step": 747 }, { "epoch": 1.3804247460757155, "grad_norm": 10.16733455657959, "learning_rate": 2.238e-06, "loss": 2.8219, "step": 748 }, { "epoch": 1.3822714681440442, "grad_norm": 8.562270164489746, "learning_rate": 2.2410000000000002e-06, "loss": 2.7874, "step": 749 }, { "epoch": 1.384118190212373, "grad_norm": 7.701913833618164, "learning_rate": 2.244e-06, "loss": 2.7978, "step": 750 }, { "epoch": 1.3859649122807016, "grad_norm": 8.914648056030273, "learning_rate": 2.247e-06, "loss": 2.7869, "step": 751 }, { "epoch": 1.3878116343490305, "grad_norm": 11.16434383392334, "learning_rate": 2.25e-06, "loss": 2.7883, "step": 752 }, { "epoch": 1.3896583564173592, "grad_norm": 10.750763893127441, "learning_rate": 2.253e-06, "loss": 2.7952, "step": 753 }, { "epoch": 1.391505078485688, "grad_norm": 7.983006954193115, "learning_rate": 2.256e-06, "loss": 2.77, "step": 754 }, { "epoch": 1.3933518005540166, "grad_norm": 4.1849188804626465, "learning_rate": 2.2590000000000003e-06, "loss": 2.736, "step": 755 }, { "epoch": 1.3951985226223453, "grad_norm": 5.593008995056152, "learning_rate": 2.262e-06, "loss": 2.7679, "step": 756 }, { "epoch": 1.397045244690674, "grad_norm": 8.314484596252441, "learning_rate": 2.265e-06, "loss": 2.7588, "step": 757 }, { "epoch": 1.3988919667590027, "grad_norm": 10.190176010131836, "learning_rate": 2.268e-06, "loss": 2.7147, "step": 758 }, { "epoch": 1.4007386888273314, "grad_norm": 6.350856304168701, "learning_rate": 2.2710000000000004e-06, "loss": 2.726, "step": 759 }, { "epoch": 1.4025854108956601, "grad_norm": 2.3066318035125732, "learning_rate": 2.274e-06, "loss": 2.6919, "step": 760 }, { "epoch": 1.404432132963989, "grad_norm": 3.782069683074951, "learning_rate": 2.277e-06, "loss": 2.7338, "step": 761 }, { "epoch": 1.4062788550323178, "grad_norm": 4.446532726287842, "learning_rate": 2.28e-06, "loss": 2.7093, "step": 762 }, { "epoch": 
1.4081255771006465, "grad_norm": 6.186867713928223, "learning_rate": 2.283e-06, "loss": 2.7388, "step": 763 }, { "epoch": 1.4099722991689752, "grad_norm": 5.769529819488525, "learning_rate": 2.2860000000000002e-06, "loss": 2.6958, "step": 764 }, { "epoch": 1.4118190212373039, "grad_norm": 4.115484237670898, "learning_rate": 2.2890000000000004e-06, "loss": 2.719, "step": 765 }, { "epoch": 1.4136657433056325, "grad_norm": 5.355208873748779, "learning_rate": 2.292e-06, "loss": 2.7121, "step": 766 }, { "epoch": 1.4155124653739612, "grad_norm": 4.1041035652160645, "learning_rate": 2.295e-06, "loss": 2.6666, "step": 767 }, { "epoch": 1.41735918744229, "grad_norm": 6.766019344329834, "learning_rate": 2.2980000000000003e-06, "loss": 2.7147, "step": 768 }, { "epoch": 1.4192059095106186, "grad_norm": 8.364627838134766, "learning_rate": 2.301e-06, "loss": 2.7387, "step": 769 }, { "epoch": 1.4210526315789473, "grad_norm": 5.883131504058838, "learning_rate": 2.304e-06, "loss": 2.7544, "step": 770 }, { "epoch": 1.422899353647276, "grad_norm": 5.160414695739746, "learning_rate": 2.307e-06, "loss": 2.7187, "step": 771 }, { "epoch": 1.4247460757156047, "grad_norm": 6.408952236175537, "learning_rate": 2.31e-06, "loss": 2.7144, "step": 772 }, { "epoch": 1.4265927977839334, "grad_norm": 4.58326530456543, "learning_rate": 2.313e-06, "loss": 2.6918, "step": 773 }, { "epoch": 1.4284395198522621, "grad_norm": 4.147642135620117, "learning_rate": 2.3160000000000004e-06, "loss": 2.7173, "step": 774 }, { "epoch": 1.4302862419205908, "grad_norm": 5.40259313583374, "learning_rate": 2.3189999999999997e-06, "loss": 2.7323, "step": 775 }, { "epoch": 1.4321329639889195, "grad_norm": 9.753005981445312, "learning_rate": 2.322e-06, "loss": 2.7337, "step": 776 }, { "epoch": 1.4339796860572485, "grad_norm": 4.117685794830322, "learning_rate": 2.325e-06, "loss": 2.7285, "step": 777 }, { "epoch": 1.4358264081255772, "grad_norm": 6.1139421463012695, "learning_rate": 2.328e-06, "loss": 2.7262, "step": 778 
}, { "epoch": 1.4376731301939059, "grad_norm": 4.455411434173584, "learning_rate": 2.3310000000000002e-06, "loss": 2.7265, "step": 779 }, { "epoch": 1.4395198522622346, "grad_norm": 4.577296733856201, "learning_rate": 2.334e-06, "loss": 2.68, "step": 780 }, { "epoch": 1.4413665743305633, "grad_norm": 3.3163914680480957, "learning_rate": 2.337e-06, "loss": 2.7446, "step": 781 }, { "epoch": 1.443213296398892, "grad_norm": 9.669931411743164, "learning_rate": 2.34e-06, "loss": 2.7558, "step": 782 }, { "epoch": 1.4450600184672207, "grad_norm": 5.4542555809021, "learning_rate": 2.3430000000000003e-06, "loss": 2.7302, "step": 783 }, { "epoch": 1.4469067405355494, "grad_norm": 9.963101387023926, "learning_rate": 2.346e-06, "loss": 2.7001, "step": 784 }, { "epoch": 1.448753462603878, "grad_norm": 11.758763313293457, "learning_rate": 2.349e-06, "loss": 2.6938, "step": 785 }, { "epoch": 1.450600184672207, "grad_norm": 5.225876808166504, "learning_rate": 2.352e-06, "loss": 2.7307, "step": 786 }, { "epoch": 1.4524469067405357, "grad_norm": 14.65092945098877, "learning_rate": 2.355e-06, "loss": 2.6993, "step": 787 }, { "epoch": 1.4542936288088644, "grad_norm": 10.20727825164795, "learning_rate": 2.358e-06, "loss": 2.7313, "step": 788 }, { "epoch": 1.456140350877193, "grad_norm": 5.7672505378723145, "learning_rate": 2.3610000000000003e-06, "loss": 2.7177, "step": 789 }, { "epoch": 1.4579870729455218, "grad_norm": 9.435592651367188, "learning_rate": 2.3639999999999997e-06, "loss": 2.7246, "step": 790 }, { "epoch": 1.4598337950138505, "grad_norm": 26.1789608001709, "learning_rate": 2.367e-06, "loss": 2.745, "step": 791 }, { "epoch": 1.4616805170821792, "grad_norm": 3.997400999069214, "learning_rate": 2.37e-06, "loss": 2.7087, "step": 792 }, { "epoch": 1.4635272391505079, "grad_norm": 3.2992331981658936, "learning_rate": 2.373e-06, "loss": 2.8327, "step": 793 }, { "epoch": 1.4653739612188366, "grad_norm": 4.294439792633057, "learning_rate": 2.376e-06, "loss": 2.8169, "step": 794 }, 
{ "epoch": 1.4672206832871653, "grad_norm": 4.262382507324219, "learning_rate": 2.379e-06, "loss": 2.7942, "step": 795 }, { "epoch": 1.469067405355494, "grad_norm": 6.730347633361816, "learning_rate": 2.382e-06, "loss": 2.8166, "step": 796 }, { "epoch": 1.4709141274238227, "grad_norm": 4.085928440093994, "learning_rate": 2.385e-06, "loss": 2.788, "step": 797 }, { "epoch": 1.4727608494921514, "grad_norm": 3.401494264602661, "learning_rate": 2.3880000000000003e-06, "loss": 2.7739, "step": 798 }, { "epoch": 1.47460757156048, "grad_norm": 6.391687870025635, "learning_rate": 2.391e-06, "loss": 2.771, "step": 799 }, { "epoch": 1.4764542936288088, "grad_norm": 9.127158164978027, "learning_rate": 2.394e-06, "loss": 2.765, "step": 800 }, { "epoch": 1.4783010156971375, "grad_norm": 7.535577774047852, "learning_rate": 2.397e-06, "loss": 2.755, "step": 801 }, { "epoch": 1.4801477377654664, "grad_norm": 2.549172878265381, "learning_rate": 2.4000000000000003e-06, "loss": 2.7157, "step": 802 }, { "epoch": 1.481994459833795, "grad_norm": 4.9487128257751465, "learning_rate": 2.403e-06, "loss": 2.7071, "step": 803 }, { "epoch": 1.4838411819021238, "grad_norm": 4.3724799156188965, "learning_rate": 2.406e-06, "loss": 2.7335, "step": 804 }, { "epoch": 1.4856879039704525, "grad_norm": 5.787751197814941, "learning_rate": 2.409e-06, "loss": 2.7254, "step": 805 }, { "epoch": 1.4875346260387812, "grad_norm": 3.1072328090667725, "learning_rate": 2.412e-06, "loss": 2.723, "step": 806 }, { "epoch": 1.4893813481071099, "grad_norm": 4.013876438140869, "learning_rate": 2.415e-06, "loss": 2.6765, "step": 807 }, { "epoch": 1.4912280701754386, "grad_norm": 5.162932872772217, "learning_rate": 2.4180000000000004e-06, "loss": 2.6791, "step": 808 }, { "epoch": 1.4930747922437673, "grad_norm": 3.466583728790283, "learning_rate": 2.4209999999999998e-06, "loss": 2.6731, "step": 809 }, { "epoch": 1.494921514312096, "grad_norm": 5.3154191970825195, "learning_rate": 2.424e-06, "loss": 2.7359, "step": 810 }, { 
"epoch": 1.4967682363804247, "grad_norm": 6.798288822174072, "learning_rate": 2.4270000000000002e-06, "loss": 2.7224, "step": 811 }, { "epoch": 1.4986149584487536, "grad_norm": 7.94334077835083, "learning_rate": 2.43e-06, "loss": 2.7205, "step": 812 }, { "epoch": 1.5004616805170823, "grad_norm": 6.905054092407227, "learning_rate": 2.4330000000000003e-06, "loss": 2.6951, "step": 813 }, { "epoch": 1.502308402585411, "grad_norm": 5.6237688064575195, "learning_rate": 2.436e-06, "loss": 2.6836, "step": 814 }, { "epoch": 1.5041551246537397, "grad_norm": 6.948147773742676, "learning_rate": 2.439e-06, "loss": 2.6905, "step": 815 }, { "epoch": 1.5060018467220684, "grad_norm": 7.7601847648620605, "learning_rate": 2.442e-06, "loss": 2.7105, "step": 816 }, { "epoch": 1.507848568790397, "grad_norm": 6.125829696655273, "learning_rate": 2.4450000000000003e-06, "loss": 2.6873, "step": 817 }, { "epoch": 1.5096952908587258, "grad_norm": 9.361186981201172, "learning_rate": 2.448e-06, "loss": 2.6749, "step": 818 }, { "epoch": 1.5115420129270545, "grad_norm": 5.572545528411865, "learning_rate": 2.451e-06, "loss": 2.6779, "step": 819 }, { "epoch": 1.5133887349953832, "grad_norm": 9.183470726013184, "learning_rate": 2.454e-06, "loss": 2.7265, "step": 820 }, { "epoch": 1.5152354570637119, "grad_norm": 5.680612087249756, "learning_rate": 2.457e-06, "loss": 2.6752, "step": 821 }, { "epoch": 1.5170821791320406, "grad_norm": 4.805209159851074, "learning_rate": 2.46e-06, "loss": 2.6951, "step": 822 }, { "epoch": 1.5189289012003693, "grad_norm": 7.211028099060059, "learning_rate": 2.4630000000000004e-06, "loss": 2.6981, "step": 823 }, { "epoch": 1.520775623268698, "grad_norm": 6.059957504272461, "learning_rate": 2.4659999999999998e-06, "loss": 2.6957, "step": 824 }, { "epoch": 1.5226223453370267, "grad_norm": 4.662355899810791, "learning_rate": 2.469e-06, "loss": 2.6932, "step": 825 }, { "epoch": 1.5244690674053554, "grad_norm": 9.695551872253418, "learning_rate": 2.4720000000000002e-06, 
"loss": 2.698, "step": 826 }, { "epoch": 1.526315789473684, "grad_norm": 6.460937976837158, "learning_rate": 2.475e-06, "loss": 2.6918, "step": 827 }, { "epoch": 1.5281625115420128, "grad_norm": 4.860535144805908, "learning_rate": 2.4780000000000002e-06, "loss": 2.6999, "step": 828 }, { "epoch": 1.5300092336103417, "grad_norm": 3.691718578338623, "learning_rate": 2.481e-06, "loss": 2.6793, "step": 829 }, { "epoch": 1.5318559556786704, "grad_norm": 6.444655895233154, "learning_rate": 2.484e-06, "loss": 2.6883, "step": 830 }, { "epoch": 1.533702677746999, "grad_norm": 4.39552116394043, "learning_rate": 2.487e-06, "loss": 2.6928, "step": 831 }, { "epoch": 1.5355493998153278, "grad_norm": 4.5506696701049805, "learning_rate": 2.4900000000000003e-06, "loss": 2.7049, "step": 832 }, { "epoch": 1.5373961218836565, "grad_norm": 3.6617109775543213, "learning_rate": 2.4929999999999997e-06, "loss": 2.698, "step": 833 }, { "epoch": 1.5392428439519852, "grad_norm": 2.4751598834991455, "learning_rate": 2.496e-06, "loss": 2.6257, "step": 834 }, { "epoch": 1.541089566020314, "grad_norm": 3.382323980331421, "learning_rate": 2.499e-06, "loss": 2.7211, "step": 835 }, { "epoch": 1.5429362880886428, "grad_norm": 5.438704967498779, "learning_rate": 2.502e-06, "loss": 2.6868, "step": 836 }, { "epoch": 1.5447830101569715, "grad_norm": 7.3822784423828125, "learning_rate": 2.505e-06, "loss": 2.6893, "step": 837 }, { "epoch": 1.5466297322253002, "grad_norm": 3.2348599433898926, "learning_rate": 2.508e-06, "loss": 2.6683, "step": 838 }, { "epoch": 1.548476454293629, "grad_norm": 4.4207377433776855, "learning_rate": 2.5109999999999998e-06, "loss": 2.6667, "step": 839 }, { "epoch": 1.5503231763619576, "grad_norm": 2.893880844116211, "learning_rate": 2.514e-06, "loss": 2.6793, "step": 840 }, { "epoch": 1.5521698984302863, "grad_norm": 3.3121488094329834, "learning_rate": 2.517e-06, "loss": 2.6794, "step": 841 }, { "epoch": 1.554016620498615, "grad_norm": 4.944551944732666, "learning_rate": 
2.52e-06, "loss": 2.6549, "step": 842 }, { "epoch": 1.5558633425669437, "grad_norm": 4.086531162261963, "learning_rate": 2.523e-06, "loss": 2.8968, "step": 843 }, { "epoch": 1.5577100646352724, "grad_norm": 2.7039895057678223, "learning_rate": 2.526e-06, "loss": 2.8208, "step": 844 }, { "epoch": 1.559556786703601, "grad_norm": 2.362365245819092, "learning_rate": 2.5290000000000003e-06, "loss": 2.7717, "step": 845 }, { "epoch": 1.5614035087719298, "grad_norm": 5.044862747192383, "learning_rate": 2.532e-06, "loss": 2.7733, "step": 846 }, { "epoch": 1.5632502308402585, "grad_norm": 5.624716281890869, "learning_rate": 2.5350000000000003e-06, "loss": 2.7638, "step": 847 }, { "epoch": 1.5650969529085872, "grad_norm": 6.3323163986206055, "learning_rate": 2.538e-06, "loss": 2.7417, "step": 848 }, { "epoch": 1.566943674976916, "grad_norm": 4.66796350479126, "learning_rate": 2.541e-06, "loss": 2.707, "step": 849 }, { "epoch": 1.5687903970452446, "grad_norm": 6.403055191040039, "learning_rate": 2.544e-06, "loss": 2.7194, "step": 850 }, { "epoch": 1.5706371191135733, "grad_norm": 5.171673774719238, "learning_rate": 2.5470000000000003e-06, "loss": 2.7007, "step": 851 }, { "epoch": 1.572483841181902, "grad_norm": 3.812385320663452, "learning_rate": 2.55e-06, "loss": 2.7044, "step": 852 }, { "epoch": 1.5743305632502307, "grad_norm": 8.64167594909668, "learning_rate": 2.553e-06, "loss": 2.7368, "step": 853 }, { "epoch": 1.5761772853185596, "grad_norm": 6.886471271514893, "learning_rate": 2.556e-06, "loss": 2.6865, "step": 854 }, { "epoch": 1.5780240073868883, "grad_norm": 4.122204303741455, "learning_rate": 2.559e-06, "loss": 2.6864, "step": 855 }, { "epoch": 1.579870729455217, "grad_norm": 5.5636067390441895, "learning_rate": 2.562e-06, "loss": 2.6743, "step": 856 }, { "epoch": 1.5817174515235457, "grad_norm": 4.138462543487549, "learning_rate": 2.5650000000000004e-06, "loss": 2.6896, "step": 857 }, { "epoch": 1.5835641735918744, "grad_norm": 3.9760003089904785, "learning_rate": 
2.568e-06, "loss": 2.6389, "step": 858 }, { "epoch": 1.585410895660203, "grad_norm": 3.1889688968658447, "learning_rate": 2.571e-06, "loss": 2.6655, "step": 859 }, { "epoch": 1.587257617728532, "grad_norm": 5.70900821685791, "learning_rate": 2.5740000000000003e-06, "loss": 2.6764, "step": 860 }, { "epoch": 1.5891043397968607, "grad_norm": 4.828238487243652, "learning_rate": 2.577e-06, "loss": 2.6499, "step": 861 }, { "epoch": 1.5909510618651894, "grad_norm": 3.9876034259796143, "learning_rate": 2.58e-06, "loss": 2.6698, "step": 862 }, { "epoch": 1.5927977839335181, "grad_norm": 3.536850690841675, "learning_rate": 2.583e-06, "loss": 2.6383, "step": 863 }, { "epoch": 1.5946445060018468, "grad_norm": 3.826817035675049, "learning_rate": 2.586e-06, "loss": 2.6255, "step": 864 }, { "epoch": 1.5964912280701755, "grad_norm": 6.136444091796875, "learning_rate": 2.589e-06, "loss": 2.6355, "step": 865 }, { "epoch": 1.5983379501385042, "grad_norm": 3.2077252864837646, "learning_rate": 2.5920000000000003e-06, "loss": 2.677, "step": 866 }, { "epoch": 1.600184672206833, "grad_norm": 3.540379762649536, "learning_rate": 2.5949999999999997e-06, "loss": 2.6584, "step": 867 }, { "epoch": 1.6020313942751616, "grad_norm": 3.182039737701416, "learning_rate": 2.598e-06, "loss": 2.6294, "step": 868 }, { "epoch": 1.6038781163434903, "grad_norm": 2.667015314102173, "learning_rate": 2.601e-06, "loss": 2.6402, "step": 869 }, { "epoch": 1.605724838411819, "grad_norm": 4.8609938621521, "learning_rate": 2.604e-06, "loss": 2.6315, "step": 870 }, { "epoch": 1.6075715604801477, "grad_norm": 3.460028648376465, "learning_rate": 2.607e-06, "loss": 2.5968, "step": 871 }, { "epoch": 1.6094182825484764, "grad_norm": 2.8984220027923584, "learning_rate": 2.61e-06, "loss": 2.6517, "step": 872 }, { "epoch": 1.611265004616805, "grad_norm": 5.12603759765625, "learning_rate": 2.613e-06, "loss": 2.5993, "step": 873 }, { "epoch": 1.6131117266851338, "grad_norm": 4.4763641357421875, "learning_rate": 2.616e-06, 
"loss": 2.6411, "step": 874 }, { "epoch": 1.6149584487534625, "grad_norm": 3.163438558578491, "learning_rate": 2.6190000000000003e-06, "loss": 2.619, "step": 875 }, { "epoch": 1.6168051708217912, "grad_norm": 5.125920295715332, "learning_rate": 2.622e-06, "loss": 2.609, "step": 876 }, { "epoch": 1.61865189289012, "grad_norm": 3.5490097999572754, "learning_rate": 2.625e-06, "loss": 2.6037, "step": 877 }, { "epoch": 1.6204986149584486, "grad_norm": 6.430298805236816, "learning_rate": 2.628e-06, "loss": 2.6346, "step": 878 }, { "epoch": 1.6223453370267775, "grad_norm": 3.71742844581604, "learning_rate": 2.631e-06, "loss": 2.6379, "step": 879 }, { "epoch": 1.6241920590951062, "grad_norm": 3.395528554916382, "learning_rate": 2.634e-06, "loss": 2.6065, "step": 880 }, { "epoch": 1.626038781163435, "grad_norm": 5.155135631561279, "learning_rate": 2.6370000000000003e-06, "loss": 2.6013, "step": 881 }, { "epoch": 1.6278855032317636, "grad_norm": 4.800289630889893, "learning_rate": 2.6399999999999997e-06, "loss": 2.6259, "step": 882 }, { "epoch": 1.6297322253000923, "grad_norm": 5.167157173156738, "learning_rate": 2.643e-06, "loss": 2.6065, "step": 883 }, { "epoch": 1.631578947368421, "grad_norm": 5.212462902069092, "learning_rate": 2.646e-06, "loss": 2.6042, "step": 884 }, { "epoch": 1.6334256694367497, "grad_norm": 3.7768590450286865, "learning_rate": 2.649e-06, "loss": 2.5636, "step": 885 }, { "epoch": 1.6352723915050786, "grad_norm": 3.708564281463623, "learning_rate": 2.652e-06, "loss": 2.624, "step": 886 }, { "epoch": 1.6371191135734073, "grad_norm": 3.8434557914733887, "learning_rate": 2.655e-06, "loss": 2.5539, "step": 887 }, { "epoch": 1.638965835641736, "grad_norm": 5.406363487243652, "learning_rate": 2.6580000000000002e-06, "loss": 2.5992, "step": 888 }, { "epoch": 1.6408125577100647, "grad_norm": 3.292588710784912, "learning_rate": 2.661e-06, "loss": 2.6162, "step": 889 }, { "epoch": 1.6426592797783934, "grad_norm": 6.086481094360352, "learning_rate": 
2.6640000000000002e-06, "loss": 2.6225, "step": 890 }, { "epoch": 1.6445060018467221, "grad_norm": 5.598721504211426, "learning_rate": 2.6670000000000005e-06, "loss": 2.6216, "step": 891 }, { "epoch": 1.6463527239150508, "grad_norm": 4.04868221282959, "learning_rate": 2.67e-06, "loss": 2.5437, "step": 892 }, { "epoch": 1.6481994459833795, "grad_norm": 4.556248188018799, "learning_rate": 2.673e-06, "loss": 2.7949, "step": 893 }, { "epoch": 1.6500461680517082, "grad_norm": 3.4863927364349365, "learning_rate": 2.6760000000000003e-06, "loss": 2.7995, "step": 894 }, { "epoch": 1.651892890120037, "grad_norm": 2.3505067825317383, "learning_rate": 2.679e-06, "loss": 2.7359, "step": 895 }, { "epoch": 1.6537396121883656, "grad_norm": 2.904036521911621, "learning_rate": 2.682e-06, "loss": 2.7324, "step": 896 }, { "epoch": 1.6555863342566943, "grad_norm": 2.6287262439727783, "learning_rate": 2.685e-06, "loss": 2.7101, "step": 897 }, { "epoch": 1.657433056325023, "grad_norm": 4.635859966278076, "learning_rate": 2.688e-06, "loss": 2.6604, "step": 898 }, { "epoch": 1.6592797783933517, "grad_norm": 3.0158092975616455, "learning_rate": 2.691e-06, "loss": 2.6693, "step": 899 }, { "epoch": 1.6611265004616804, "grad_norm": 2.7617454528808594, "learning_rate": 2.6940000000000004e-06, "loss": 2.6603, "step": 900 }, { "epoch": 1.6629732225300091, "grad_norm": 4.1142449378967285, "learning_rate": 2.6969999999999998e-06, "loss": 2.6057, "step": 901 }, { "epoch": 1.6648199445983378, "grad_norm": 4.510326385498047, "learning_rate": 2.7e-06, "loss": 2.6286, "step": 902 }, { "epoch": 1.6666666666666665, "grad_norm": 3.2970147132873535, "learning_rate": 2.703e-06, "loss": 2.6388, "step": 903 }, { "epoch": 1.6685133887349952, "grad_norm": 4.057903289794922, "learning_rate": 2.706e-06, "loss": 2.6044, "step": 904 }, { "epoch": 1.6703601108033241, "grad_norm": 2.707749605178833, "learning_rate": 2.7090000000000002e-06, "loss": 2.5706, "step": 905 }, { "epoch": 1.6722068328716528, "grad_norm": 
3.057931423187256, "learning_rate": 2.712e-06, "loss": 2.5373, "step": 906 }, { "epoch": 1.6740535549399815, "grad_norm": 3.6897811889648438, "learning_rate": 2.715e-06, "loss": 2.5179, "step": 907 }, { "epoch": 1.6759002770083102, "grad_norm": 3.4612832069396973, "learning_rate": 2.718e-06, "loss": 2.5037, "step": 908 }, { "epoch": 1.677746999076639, "grad_norm": 4.006030559539795, "learning_rate": 2.7210000000000003e-06, "loss": 2.5421, "step": 909 }, { "epoch": 1.6795937211449676, "grad_norm": 4.664487838745117, "learning_rate": 2.724e-06, "loss": 2.5214, "step": 910 }, { "epoch": 1.6814404432132966, "grad_norm": 4.485028266906738, "learning_rate": 2.727e-06, "loss": 2.4488, "step": 911 }, { "epoch": 1.6832871652816253, "grad_norm": 3.4627418518066406, "learning_rate": 2.73e-06, "loss": 2.4525, "step": 912 }, { "epoch": 1.685133887349954, "grad_norm": 3.839110851287842, "learning_rate": 2.733e-06, "loss": 2.4505, "step": 913 }, { "epoch": 1.6869806094182827, "grad_norm": 3.6342220306396484, "learning_rate": 2.736e-06, "loss": 2.4108, "step": 914 }, { "epoch": 1.6888273314866113, "grad_norm": 4.183078765869141, "learning_rate": 2.7390000000000004e-06, "loss": 2.4424, "step": 915 }, { "epoch": 1.69067405355494, "grad_norm": 4.1377854347229, "learning_rate": 2.7419999999999998e-06, "loss": 2.3767, "step": 916 }, { "epoch": 1.6925207756232687, "grad_norm": 3.869332790374756, "learning_rate": 2.745e-06, "loss": 2.3823, "step": 917 }, { "epoch": 1.6943674976915974, "grad_norm": 3.835867166519165, "learning_rate": 2.748e-06, "loss": 2.3853, "step": 918 }, { "epoch": 1.6962142197599261, "grad_norm": 5.005284309387207, "learning_rate": 2.751e-06, "loss": 2.3881, "step": 919 }, { "epoch": 1.6980609418282548, "grad_norm": 4.473355770111084, "learning_rate": 2.7540000000000002e-06, "loss": 2.3436, "step": 920 }, { "epoch": 1.6999076638965835, "grad_norm": 3.1353838443756104, "learning_rate": 2.757e-06, "loss": 2.3374, "step": 921 }, { "epoch": 1.7017543859649122, 
"grad_norm": 3.464097499847412, "learning_rate": 2.76e-06, "loss": 2.2889, "step": 922 }, { "epoch": 1.703601108033241, "grad_norm": 3.300783634185791, "learning_rate": 2.763e-06, "loss": 2.2898, "step": 923 }, { "epoch": 1.7054478301015696, "grad_norm": 4.972532272338867, "learning_rate": 2.7660000000000003e-06, "loss": 2.2912, "step": 924 }, { "epoch": 1.7072945521698983, "grad_norm": 5.833555698394775, "learning_rate": 2.7689999999999997e-06, "loss": 2.2812, "step": 925 }, { "epoch": 1.709141274238227, "grad_norm": 4.416055202484131, "learning_rate": 2.772e-06, "loss": 2.2297, "step": 926 }, { "epoch": 1.7109879963065557, "grad_norm": 8.705913543701172, "learning_rate": 2.775e-06, "loss": 2.243, "step": 927 }, { "epoch": 1.7128347183748844, "grad_norm": 3.3832077980041504, "learning_rate": 2.778e-06, "loss": 2.2515, "step": 928 }, { "epoch": 1.7146814404432131, "grad_norm": 4.412527561187744, "learning_rate": 2.781e-06, "loss": 2.193, "step": 929 }, { "epoch": 1.716528162511542, "grad_norm": 4.890414237976074, "learning_rate": 2.784e-06, "loss": 2.2242, "step": 930 }, { "epoch": 1.7183748845798708, "grad_norm": 4.201949596405029, "learning_rate": 2.787e-06, "loss": 2.1902, "step": 931 }, { "epoch": 1.7202216066481995, "grad_norm": 3.7861242294311523, "learning_rate": 2.79e-06, "loss": 2.188, "step": 932 }, { "epoch": 1.7220683287165282, "grad_norm": 4.323360443115234, "learning_rate": 2.793e-06, "loss": 2.1455, "step": 933 }, { "epoch": 1.7239150507848569, "grad_norm": 2.85317325592041, "learning_rate": 2.7960000000000004e-06, "loss": 2.099, "step": 934 }, { "epoch": 1.7257617728531855, "grad_norm": 3.6395180225372314, "learning_rate": 2.799e-06, "loss": 2.0754, "step": 935 }, { "epoch": 1.7276084949215145, "grad_norm": 4.379446983337402, "learning_rate": 2.802e-06, "loss": 2.0919, "step": 936 }, { "epoch": 1.7294552169898432, "grad_norm": 3.4269096851348877, "learning_rate": 2.8050000000000002e-06, "loss": 2.0443, "step": 937 }, { "epoch": 1.7313019390581719, 
"grad_norm": 4.84335994720459, "learning_rate": 2.808e-06, "loss": 2.0577, "step": 938 }, { "epoch": 1.7331486611265006, "grad_norm": 7.587337493896484, "learning_rate": 2.8110000000000003e-06, "loss": 2.0765, "step": 939 }, { "epoch": 1.7349953831948293, "grad_norm": 6.143586158752441, "learning_rate": 2.814e-06, "loss": 2.0361, "step": 940 }, { "epoch": 1.736842105263158, "grad_norm": 4.004847526550293, "learning_rate": 2.817e-06, "loss": 1.9983, "step": 941 }, { "epoch": 1.7386888273314867, "grad_norm": 6.8099260330200195, "learning_rate": 2.82e-06, "loss": 2.0316, "step": 942 }, { "epoch": 1.7405355493998154, "grad_norm": 9.880424499511719, "learning_rate": 2.8230000000000003e-06, "loss": 2.4311, "step": 943 }, { "epoch": 1.742382271468144, "grad_norm": 6.309079170227051, "learning_rate": 2.826e-06, "loss": 2.3447, "step": 944 }, { "epoch": 1.7442289935364728, "grad_norm": 4.839874744415283, "learning_rate": 2.829e-06, "loss": 2.2527, "step": 945 }, { "epoch": 1.7460757156048015, "grad_norm": 3.779041051864624, "learning_rate": 2.832e-06, "loss": 2.1876, "step": 946 }, { "epoch": 1.7479224376731302, "grad_norm": 6.362606048583984, "learning_rate": 2.835e-06, "loss": 2.1817, "step": 947 }, { "epoch": 1.7497691597414589, "grad_norm": 8.506185531616211, "learning_rate": 2.838e-06, "loss": 2.1166, "step": 948 }, { "epoch": 1.7516158818097876, "grad_norm": 5.72320032119751, "learning_rate": 2.8410000000000004e-06, "loss": 2.1144, "step": 949 }, { "epoch": 1.7534626038781163, "grad_norm": 4.872931957244873, "learning_rate": 2.844e-06, "loss": 2.0904, "step": 950 }, { "epoch": 1.755309325946445, "grad_norm": 5.8115153312683105, "learning_rate": 2.847e-06, "loss": 2.0425, "step": 951 }, { "epoch": 1.7571560480147737, "grad_norm": 5.148961067199707, "learning_rate": 2.8500000000000002e-06, "loss": 1.9555, "step": 952 }, { "epoch": 1.7590027700831024, "grad_norm": 4.77873420715332, "learning_rate": 2.853e-06, "loss": 1.8868, "step": 953 }, { "epoch": 1.760849492151431, 
"grad_norm": 6.63554573059082, "learning_rate": 2.8560000000000003e-06, "loss": 1.9266, "step": 954 }, { "epoch": 1.76269621421976, "grad_norm": 5.592360019683838, "learning_rate": 2.859e-06, "loss": 1.9111, "step": 955 }, { "epoch": 1.7645429362880887, "grad_norm": 6.387895107269287, "learning_rate": 2.862e-06, "loss": 1.8245, "step": 956 }, { "epoch": 1.7663896583564174, "grad_norm": 5.357057571411133, "learning_rate": 2.865e-06, "loss": 1.748, "step": 957 }, { "epoch": 1.768236380424746, "grad_norm": 13.47465991973877, "learning_rate": 2.8680000000000003e-06, "loss": 1.7151, "step": 958 }, { "epoch": 1.7700831024930748, "grad_norm": 3.3738362789154053, "learning_rate": 2.8709999999999997e-06, "loss": 1.673, "step": 959 }, { "epoch": 1.7719298245614035, "grad_norm": 4.385458946228027, "learning_rate": 2.874e-06, "loss": 1.7046, "step": 960 }, { "epoch": 1.7737765466297324, "grad_norm": 2.9951484203338623, "learning_rate": 2.877e-06, "loss": 1.6725, "step": 961 }, { "epoch": 1.775623268698061, "grad_norm": 5.280602931976318, "learning_rate": 2.88e-06, "loss": 1.6102, "step": 962 }, { "epoch": 1.7774699907663898, "grad_norm": 3.529313802719116, "learning_rate": 2.883e-06, "loss": 1.5605, "step": 963 }, { "epoch": 1.7793167128347185, "grad_norm": 8.768348693847656, "learning_rate": 2.886e-06, "loss": 1.5792, "step": 964 }, { "epoch": 1.7811634349030472, "grad_norm": 4.621875286102295, "learning_rate": 2.8889999999999998e-06, "loss": 1.5769, "step": 965 }, { "epoch": 1.7830101569713759, "grad_norm": 2.7766876220703125, "learning_rate": 2.892e-06, "loss": 1.522, "step": 966 }, { "epoch": 1.7848568790397046, "grad_norm": 3.9150314331054688, "learning_rate": 2.8950000000000002e-06, "loss": 1.539, "step": 967 }, { "epoch": 1.7867036011080333, "grad_norm": 7.411330699920654, "learning_rate": 2.898e-06, "loss": 1.4637, "step": 968 }, { "epoch": 1.788550323176362, "grad_norm": 4.648345470428467, "learning_rate": 2.901e-06, "loss": 1.4333, "step": 969 }, { "epoch": 
1.7903970452446907, "grad_norm": 4.080045223236084, "learning_rate": 2.904e-06, "loss": 1.4157, "step": 970 }, { "epoch": 1.7922437673130194, "grad_norm": 15.073267936706543, "learning_rate": 2.907e-06, "loss": 1.3991, "step": 971 }, { "epoch": 1.794090489381348, "grad_norm": 5.176401138305664, "learning_rate": 2.91e-06, "loss": 1.3885, "step": 972 }, { "epoch": 1.7959372114496768, "grad_norm": 3.8407390117645264, "learning_rate": 2.9130000000000003e-06, "loss": 1.3605, "step": 973 }, { "epoch": 1.7977839335180055, "grad_norm": 3.0771899223327637, "learning_rate": 2.916e-06, "loss": 1.3694, "step": 974 }, { "epoch": 1.7996306555863342, "grad_norm": 4.342832088470459, "learning_rate": 2.919e-06, "loss": 1.2812, "step": 975 }, { "epoch": 1.8014773776546629, "grad_norm": 3.102047920227051, "learning_rate": 2.922e-06, "loss": 1.2985, "step": 976 }, { "epoch": 1.8033240997229916, "grad_norm": 2.795923948287964, "learning_rate": 2.9250000000000004e-06, "loss": 1.2971, "step": 977 }, { "epoch": 1.8051708217913203, "grad_norm": 6.470213413238525, "learning_rate": 2.928e-06, "loss": 1.1874, "step": 978 }, { "epoch": 1.807017543859649, "grad_norm": 9.83168888092041, "learning_rate": 2.931e-06, "loss": 1.2674, "step": 979 }, { "epoch": 1.8088642659279779, "grad_norm": 5.468801498413086, "learning_rate": 2.934e-06, "loss": 1.2251, "step": 980 }, { "epoch": 1.8107109879963066, "grad_norm": 4.2570109367370605, "learning_rate": 2.937e-06, "loss": 1.1727, "step": 981 }, { "epoch": 1.8125577100646353, "grad_norm": 7.420529842376709, "learning_rate": 2.9400000000000002e-06, "loss": 1.1752, "step": 982 }, { "epoch": 1.814404432132964, "grad_norm": NaN, "learning_rate": 2.9400000000000002e-06, "loss": 1.185, "step": 983 }, { "epoch": 1.8162511542012927, "grad_norm": 4.976733207702637, "learning_rate": 2.9430000000000005e-06, "loss": 1.178, "step": 984 }, { "epoch": 1.8180978762696214, "grad_norm": 4.1213765144348145, "learning_rate": 2.946e-06, "loss": 1.1264, "step": 985 }, { 
"epoch": 1.8199445983379503, "grad_norm": 3.392833948135376, "learning_rate": 2.949e-06, "loss": 1.1685, "step": 986 }, { "epoch": 1.821791320406279, "grad_norm": 21.531436920166016, "learning_rate": 2.9520000000000003e-06, "loss": 1.1395, "step": 987 }, { "epoch": 1.8236380424746077, "grad_norm": 4.77427339553833, "learning_rate": 2.955e-06, "loss": 1.1045, "step": 988 }, { "epoch": 1.8254847645429364, "grad_norm": 5.12446928024292, "learning_rate": 2.958e-06, "loss": 1.1141, "step": 989 }, { "epoch": 1.827331486611265, "grad_norm": 4.657517910003662, "learning_rate": 2.961e-06, "loss": 1.0613, "step": 990 }, { "epoch": 1.8291782086795938, "grad_norm": 9.723724365234375, "learning_rate": 2.964e-06, "loss": 1.0681, "step": 991 }, { "epoch": 1.8310249307479225, "grad_norm": 3.2556118965148926, "learning_rate": 2.967e-06, "loss": 1.1434, "step": 992 }, { "epoch": 1.8328716528162512, "grad_norm": 10.147623062133789, "learning_rate": 2.9700000000000004e-06, "loss": 1.8507, "step": 993 }, { "epoch": 1.83471837488458, "grad_norm": 5.80134391784668, "learning_rate": 2.9729999999999997e-06, "loss": 1.5426, "step": 994 }, { "epoch": 1.8365650969529086, "grad_norm": 4.6190876960754395, "learning_rate": 2.976e-06, "loss": 1.486, "step": 995 }, { "epoch": 1.8384118190212373, "grad_norm": 3.1225485801696777, "learning_rate": 2.979e-06, "loss": 1.4601, "step": 996 }, { "epoch": 1.840258541089566, "grad_norm": 3.720115900039673, "learning_rate": 2.982e-06, "loss": 1.4278, "step": 997 }, { "epoch": 1.8421052631578947, "grad_norm": 10.240564346313477, "learning_rate": 2.9850000000000002e-06, "loss": 1.2362, "step": 998 }, { "epoch": 1.8439519852262234, "grad_norm": 5.202827453613281, "learning_rate": 2.988e-06, "loss": 1.2611, "step": 999 }, { "epoch": 1.845798707294552, "grad_norm": 4.68275785446167, "learning_rate": 2.991e-06, "loss": 1.2884, "step": 1000 }, { "epoch": 1.845798707294552, "eval_cer": 0.3808227700443269, "eval_loss": 1.387242317199707, "eval_runtime": 16.2128, 
"eval_samples_per_second": 18.751, "eval_steps_per_second": 0.617, "eval_wer": 1.0113200306983883, "step": 1000 }, { "epoch": 1.8476454293628808, "grad_norm": 5.817086219787598, "learning_rate": 2.994e-06, "loss": 1.2335, "step": 1001 }, { "epoch": 1.8494921514312095, "grad_norm": 2.8305552005767822, "learning_rate": 2.9970000000000003e-06, "loss": 1.1889, "step": 1002 }, { "epoch": 1.8513388734995382, "grad_norm": 5.681527614593506, "learning_rate": 3e-06, "loss": 1.1404, "step": 1003 }, { "epoch": 1.8531855955678669, "grad_norm": 4.868209362030029, "learning_rate": 3.003e-06, "loss": 1.1088, "step": 1004 }, { "epoch": 1.8550323176361958, "grad_norm": 4.205574989318848, "learning_rate": 3.006e-06, "loss": 1.0609, "step": 1005 }, { "epoch": 1.8568790397045245, "grad_norm": 9.881863594055176, "learning_rate": 3.009e-06, "loss": 0.9499, "step": 1006 }, { "epoch": 1.8587257617728532, "grad_norm": 3.2579362392425537, "learning_rate": 3.012e-06, "loss": 0.8826, "step": 1007 }, { "epoch": 1.860572483841182, "grad_norm": 5.675721645355225, "learning_rate": 3.0150000000000004e-06, "loss": 0.873, "step": 1008 }, { "epoch": 1.8624192059095106, "grad_norm": 4.980535507202148, "learning_rate": 3.0179999999999997e-06, "loss": 0.8653, "step": 1009 }, { "epoch": 1.8642659279778393, "grad_norm": 11.08716869354248, "learning_rate": 3.021e-06, "loss": 0.903, "step": 1010 }, { "epoch": 1.866112650046168, "grad_norm": 4.356354236602783, "learning_rate": 3.024e-06, "loss": 0.939, "step": 1011 }, { "epoch": 1.867959372114497, "grad_norm": 6.092064380645752, "learning_rate": 3.027e-06, "loss": 0.8441, "step": 1012 }, { "epoch": 1.8698060941828256, "grad_norm": 10.622693061828613, "learning_rate": 3.0300000000000002e-06, "loss": 0.7909, "step": 1013 }, { "epoch": 1.8716528162511543, "grad_norm": 5.019896030426025, "learning_rate": 3.033e-06, "loss": 0.8149, "step": 1014 }, { "epoch": 1.873499538319483, "grad_norm": 3.3237733840942383, "learning_rate": 3.036e-06, "loss": 0.741, "step": 
1015 }, { "epoch": 1.8753462603878117, "grad_norm": 4.057783603668213, "learning_rate": 3.039e-06, "loss": 0.7615, "step": 1016 }, { "epoch": 1.8771929824561404, "grad_norm": 3.0382447242736816, "learning_rate": 3.0420000000000003e-06, "loss": 0.7755, "step": 1017 }, { "epoch": 1.8790397045244691, "grad_norm": 3.909538745880127, "learning_rate": 3.0450000000000005e-06, "loss": 0.7565, "step": 1018 }, { "epoch": 1.8808864265927978, "grad_norm": 7.082792282104492, "learning_rate": 3.048e-06, "loss": 0.8341, "step": 1019 }, { "epoch": 1.8827331486611265, "grad_norm": 3.4757003784179688, "learning_rate": 3.051e-06, "loss": 0.7641, "step": 1020 }, { "epoch": 1.8845798707294552, "grad_norm": 4.288313388824463, "learning_rate": 3.0540000000000003e-06, "loss": 0.7306, "step": 1021 }, { "epoch": 1.886426592797784, "grad_norm": 12.425573348999023, "learning_rate": 3.057e-06, "loss": 0.6782, "step": 1022 }, { "epoch": 1.8882733148661126, "grad_norm": 3.1311159133911133, "learning_rate": 3.06e-06, "loss": 0.7367, "step": 1023 }, { "epoch": 1.8901200369344413, "grad_norm": 3.636770248413086, "learning_rate": 3.063e-06, "loss": 0.7476, "step": 1024 }, { "epoch": 1.89196675900277, "grad_norm": 3.5470006465911865, "learning_rate": 3.066e-06, "loss": 0.7078, "step": 1025 }, { "epoch": 1.8938134810710987, "grad_norm": 4.514321804046631, "learning_rate": 3.069e-06, "loss": 0.6822, "step": 1026 }, { "epoch": 1.8956602031394274, "grad_norm": 4.672080993652344, "learning_rate": 3.0720000000000004e-06, "loss": 0.6728, "step": 1027 }, { "epoch": 1.897506925207756, "grad_norm": 20.278226852416992, "learning_rate": 3.0749999999999998e-06, "loss": 0.6897, "step": 1028 }, { "epoch": 1.8993536472760848, "grad_norm": 7.047516345977783, "learning_rate": 3.078e-06, "loss": 0.6794, "step": 1029 }, { "epoch": 1.9012003693444137, "grad_norm": 2.9382872581481934, "learning_rate": 3.0810000000000002e-06, "loss": 0.738, "step": 1030 }, { "epoch": 1.9030470914127424, "grad_norm": 3.0094921588897705, 
"learning_rate": 3.084e-06, "loss": 0.7073, "step": 1031 }, { "epoch": 1.9048938134810711, "grad_norm": 20.426923751831055, "learning_rate": 3.0870000000000003e-06, "loss": 0.6801, "step": 1032 }, { "epoch": 1.9067405355493998, "grad_norm": 5.870213508605957, "learning_rate": 3.09e-06, "loss": 0.6999, "step": 1033 }, { "epoch": 1.9085872576177285, "grad_norm": 4.475285530090332, "learning_rate": 3.093e-06, "loss": 0.6844, "step": 1034 }, { "epoch": 1.9104339796860572, "grad_norm": 24.2203311920166, "learning_rate": 3.096e-06, "loss": 0.6208, "step": 1035 }, { "epoch": 1.912280701754386, "grad_norm": 4.748508453369141, "learning_rate": 3.0990000000000003e-06, "loss": 0.6121, "step": 1036 }, { "epoch": 1.9141274238227148, "grad_norm": 3.6154093742370605, "learning_rate": 3.102e-06, "loss": 0.6886, "step": 1037 }, { "epoch": 1.9159741458910435, "grad_norm": 3.9529449939727783, "learning_rate": 3.105e-06, "loss": 0.6453, "step": 1038 }, { "epoch": 1.9178208679593722, "grad_norm": 18.980297088623047, "learning_rate": 3.108e-06, "loss": 0.6588, "step": 1039 }, { "epoch": 1.919667590027701, "grad_norm": 4.728965759277344, "learning_rate": 3.111e-06, "loss": 0.6229, "step": 1040 }, { "epoch": 1.9215143120960296, "grad_norm": 8.780813217163086, "learning_rate": 3.114e-06, "loss": 0.7132, "step": 1041 }, { "epoch": 1.9233610341643583, "grad_norm": 7.412342071533203, "learning_rate": 3.1170000000000004e-06, "loss": 0.8253, "step": 1042 }, { "epoch": 1.925207756232687, "grad_norm": 8.90256118774414, "learning_rate": 3.1199999999999998e-06, "loss": 1.3585, "step": 1043 }, { "epoch": 1.9270544783010157, "grad_norm": 4.4571428298950195, "learning_rate": 3.123e-06, "loss": 1.1706, "step": 1044 }, { "epoch": 1.9289012003693444, "grad_norm": 4.3071184158325195, "learning_rate": 3.1260000000000002e-06, "loss": 1.0833, "step": 1045 }, { "epoch": 1.9307479224376731, "grad_norm": 9.08230972290039, "learning_rate": 3.129e-06, "loss": 0.9173, "step": 1046 }, { "epoch": 1.9325946445060018, 
"grad_norm": 5.986129283905029, "learning_rate": 3.1320000000000003e-06, "loss": 0.886, "step": 1047 }, { "epoch": 1.9344413665743305, "grad_norm": 4.313248157501221, "learning_rate": 3.135e-06, "loss": 0.9184, "step": 1048 }, { "epoch": 1.9362880886426592, "grad_norm": 3.475137948989868, "learning_rate": 3.138e-06, "loss": 0.8592, "step": 1049 }, { "epoch": 1.938134810710988, "grad_norm": 3.4487123489379883, "learning_rate": 3.141e-06, "loss": 0.8229, "step": 1050 }, { "epoch": 1.9399815327793166, "grad_norm": 5.085602760314941, "learning_rate": 3.1440000000000003e-06, "loss": 0.7537, "step": 1051 }, { "epoch": 1.9418282548476453, "grad_norm": 3.04370379447937, "learning_rate": 3.1469999999999997e-06, "loss": 0.778, "step": 1052 }, { "epoch": 1.943674976915974, "grad_norm": 3.9167068004608154, "learning_rate": 3.15e-06, "loss": 0.6315, "step": 1053 }, { "epoch": 1.9455216989843027, "grad_norm": 2.40921950340271, "learning_rate": 3.153e-06, "loss": 0.6394, "step": 1054 }, { "epoch": 1.9473684210526314, "grad_norm": 9.868019104003906, "learning_rate": 3.156e-06, "loss": 0.6353, "step": 1055 }, { "epoch": 1.9492151431209603, "grad_norm": 2.789872646331787, "learning_rate": 3.159e-06, "loss": 0.5673, "step": 1056 }, { "epoch": 1.951061865189289, "grad_norm": 2.2566208839416504, "learning_rate": 3.162e-06, "loss": 0.6103, "step": 1057 }, { "epoch": 1.9529085872576177, "grad_norm": 3.410893678665161, "learning_rate": 3.1649999999999998e-06, "loss": 0.5088, "step": 1058 }, { "epoch": 1.9547553093259464, "grad_norm": 2.58414626121521, "learning_rate": 3.168e-06, "loss": 0.5453, "step": 1059 }, { "epoch": 1.9566020313942751, "grad_norm": 6.431880950927734, "learning_rate": 3.1710000000000002e-06, "loss": 0.4953, "step": 1060 }, { "epoch": 1.9584487534626038, "grad_norm": 2.7993009090423584, "learning_rate": 3.1740000000000004e-06, "loss": 0.5452, "step": 1061 }, { "epoch": 1.9602954755309328, "grad_norm": 5.0864386558532715, "learning_rate": 3.177e-06, "loss": 0.5177, 
"step": 1062 }, { "epoch": 1.9621421975992615, "grad_norm": 2.416808605194092, "learning_rate": 3.18e-06, "loss": 0.5564, "step": 1063 }, { "epoch": 1.9639889196675901, "grad_norm": 3.0072779655456543, "learning_rate": 3.1830000000000003e-06, "loss": 0.4664, "step": 1064 }, { "epoch": 1.9658356417359188, "grad_norm": 3.39021635055542, "learning_rate": 3.186e-06, "loss": 0.5069, "step": 1065 }, { "epoch": 1.9676823638042475, "grad_norm": 2.3784401416778564, "learning_rate": 3.1890000000000003e-06, "loss": 0.5095, "step": 1066 }, { "epoch": 1.9695290858725762, "grad_norm": 2.980907440185547, "learning_rate": 3.192e-06, "loss": 0.581, "step": 1067 }, { "epoch": 1.971375807940905, "grad_norm": 3.834425687789917, "learning_rate": 3.195e-06, "loss": 0.5368, "step": 1068 }, { "epoch": 1.9732225300092336, "grad_norm": 4.101207733154297, "learning_rate": 3.198e-06, "loss": 0.5162, "step": 1069 }, { "epoch": 1.9750692520775623, "grad_norm": 2.8639001846313477, "learning_rate": 3.2010000000000004e-06, "loss": 0.4687, "step": 1070 }, { "epoch": 1.976915974145891, "grad_norm": 3.660947561264038, "learning_rate": 3.204e-06, "loss": 0.5063, "step": 1071 }, { "epoch": 1.9787626962142197, "grad_norm": 4.399283409118652, "learning_rate": 3.207e-06, "loss": 0.5655, "step": 1072 }, { "epoch": 1.9806094182825484, "grad_norm": 5.054996967315674, "learning_rate": 3.21e-06, "loss": 0.5052, "step": 1073 }, { "epoch": 1.9824561403508771, "grad_norm": 3.4301655292510986, "learning_rate": 3.213e-06, "loss": 0.484, "step": 1074 }, { "epoch": 1.9843028624192058, "grad_norm": 3.875305652618408, "learning_rate": 3.216e-06, "loss": 0.5369, "step": 1075 }, { "epoch": 1.9861495844875345, "grad_norm": 4.462026119232178, "learning_rate": 3.2190000000000004e-06, "loss": 0.5141, "step": 1076 }, { "epoch": 1.9879963065558632, "grad_norm": 4.290552139282227, "learning_rate": 3.222e-06, "loss": 0.5084, "step": 1077 }, { "epoch": 1.989843028624192, "grad_norm": 2.693206787109375, "learning_rate": 3.225e-06, 
"loss": 0.5143, "step": 1078 }, { "epoch": 1.9916897506925206, "grad_norm": 13.908217430114746, "learning_rate": 3.2280000000000003e-06, "loss": 0.5154, "step": 1079 }, { "epoch": 1.9935364727608493, "grad_norm": 4.1457600593566895, "learning_rate": 3.231e-06, "loss": 0.5337, "step": 1080 }, { "epoch": 1.9953831948291783, "grad_norm": 4.087045669555664, "learning_rate": 3.2340000000000003e-06, "loss": 0.5313, "step": 1081 }, { "epoch": 1.997229916897507, "grad_norm": 6.225906848907471, "learning_rate": 3.237e-06, "loss": 0.5336, "step": 1082 }, { "epoch": 1.9990766389658357, "grad_norm": 4.523451328277588, "learning_rate": 3.24e-06, "loss": 0.5495, "step": 1083 }, { "epoch": 2.0, "grad_norm": 3.176987648010254, "learning_rate": 3.243e-06, "loss": 0.3116, "step": 1084 }, { "epoch": 2.0018467220683287, "grad_norm": 5.960220813751221, "learning_rate": 3.2460000000000003e-06, "loss": 1.2542, "step": 1085 }, { "epoch": 2.0036934441366574, "grad_norm": 5.376857757568359, "learning_rate": 3.2489999999999997e-06, "loss": 1.1583, "step": 1086 }, { "epoch": 2.005540166204986, "grad_norm": 3.4985902309417725, "learning_rate": 3.252e-06, "loss": 0.9727, "step": 1087 }, { "epoch": 2.007386888273315, "grad_norm": 3.4755091667175293, "learning_rate": 3.255e-06, "loss": 1.0657, "step": 1088 }, { "epoch": 2.0092336103416435, "grad_norm": 2.7386229038238525, "learning_rate": 3.258e-06, "loss": 0.8495, "step": 1089 }, { "epoch": 2.011080332409972, "grad_norm": 4.1003737449646, "learning_rate": 3.261e-06, "loss": 0.8668, "step": 1090 }, { "epoch": 2.012927054478301, "grad_norm": 6.044220447540283, "learning_rate": 3.264e-06, "loss": 0.8225, "step": 1091 }, { "epoch": 2.0147737765466296, "grad_norm": 3.062387704849243, "learning_rate": 3.267e-06, "loss": 0.6906, "step": 1092 }, { "epoch": 2.0166204986149583, "grad_norm": 5.289513111114502, "learning_rate": 3.27e-06, "loss": 0.6469, "step": 1093 }, { "epoch": 2.018467220683287, "grad_norm": 2.689310312271118, "learning_rate": 
3.2730000000000003e-06, "loss": 0.7619, "step": 1094 }, { "epoch": 2.0203139427516157, "grad_norm": 6.174566745758057, "learning_rate": 3.276e-06, "loss": 0.6249, "step": 1095 }, { "epoch": 2.0221606648199444, "grad_norm": 6.018412113189697, "learning_rate": 3.279e-06, "loss": 0.5918, "step": 1096 }, { "epoch": 2.0240073868882735, "grad_norm": 2.6255674362182617, "learning_rate": 3.282e-06, "loss": 0.578, "step": 1097 }, { "epoch": 2.0258541089566022, "grad_norm": 2.5936782360076904, "learning_rate": 3.285e-06, "loss": 0.6471, "step": 1098 }, { "epoch": 2.027700831024931, "grad_norm": 2.8347206115722656, "learning_rate": 3.288e-06, "loss": 0.5161, "step": 1099 }, { "epoch": 2.0295475530932596, "grad_norm": 3.6363515853881836, "learning_rate": 3.2910000000000003e-06, "loss": 0.4732, "step": 1100 }, { "epoch": 2.0313942751615883, "grad_norm": 2.8886845111846924, "learning_rate": 3.2939999999999997e-06, "loss": 0.4759, "step": 1101 }, { "epoch": 2.033240997229917, "grad_norm": 5.658815383911133, "learning_rate": 3.297e-06, "loss": 0.4244, "step": 1102 }, { "epoch": 2.0350877192982457, "grad_norm": 3.8610455989837646, "learning_rate": 3.3e-06, "loss": 0.5042, "step": 1103 }, { "epoch": 2.0369344413665744, "grad_norm": 3.7909250259399414, "learning_rate": 3.3030000000000004e-06, "loss": 0.4401, "step": 1104 }, { "epoch": 2.038781163434903, "grad_norm": 3.0346169471740723, "learning_rate": 3.306e-06, "loss": 0.4517, "step": 1105 }, { "epoch": 2.040627885503232, "grad_norm": 2.9240636825561523, "learning_rate": 3.309e-06, "loss": 0.512, "step": 1106 }, { "epoch": 2.0424746075715605, "grad_norm": 3.549503803253174, "learning_rate": 3.3120000000000002e-06, "loss": 0.4491, "step": 1107 }, { "epoch": 2.044321329639889, "grad_norm": 2.4286088943481445, "learning_rate": 3.315e-06, "loss": 0.4317, "step": 1108 }, { "epoch": 2.046168051708218, "grad_norm": 4.359600067138672, "learning_rate": 3.3180000000000003e-06, "loss": 0.507, "step": 1109 }, { "epoch": 2.0480147737765466, 
"grad_norm": 2.9661221504211426, "learning_rate": 3.3210000000000005e-06, "loss": 0.4248, "step": 1110 }, { "epoch": 2.0498614958448753, "grad_norm": 18.271678924560547, "learning_rate": 3.324e-06, "loss": 0.4179, "step": 1111 }, { "epoch": 2.051708217913204, "grad_norm": 3.5711114406585693, "learning_rate": 3.327e-06, "loss": 0.4596, "step": 1112 }, { "epoch": 2.0535549399815327, "grad_norm": 3.6461987495422363, "learning_rate": 3.3300000000000003e-06, "loss": 0.4043, "step": 1113 }, { "epoch": 2.0554016620498614, "grad_norm": 3.886255979537964, "learning_rate": 3.333e-06, "loss": 0.4284, "step": 1114 }, { "epoch": 2.05724838411819, "grad_norm": 3.654627799987793, "learning_rate": 3.336e-06, "loss": 0.4275, "step": 1115 }, { "epoch": 2.059095106186519, "grad_norm": 2.4884819984436035, "learning_rate": 3.339e-06, "loss": 0.3802, "step": 1116 }, { "epoch": 2.0609418282548475, "grad_norm": 10.350143432617188, "learning_rate": 3.342e-06, "loss": 0.4153, "step": 1117 }, { "epoch": 2.062788550323176, "grad_norm": 4.766338348388672, "learning_rate": 3.345e-06, "loss": 0.4242, "step": 1118 }, { "epoch": 2.064635272391505, "grad_norm": 2.0844154357910156, "learning_rate": 3.3480000000000004e-06, "loss": 0.3617, "step": 1119 }, { "epoch": 2.0664819944598336, "grad_norm": 5.149382591247559, "learning_rate": 3.3509999999999998e-06, "loss": 0.4822, "step": 1120 }, { "epoch": 2.0683287165281623, "grad_norm": 8.087337493896484, "learning_rate": 3.354e-06, "loss": 0.4138, "step": 1121 }, { "epoch": 2.0701754385964914, "grad_norm": 3.428081750869751, "learning_rate": 3.3570000000000002e-06, "loss": 0.3851, "step": 1122 }, { "epoch": 2.07202216066482, "grad_norm": 2.2771971225738525, "learning_rate": 3.36e-06, "loss": 0.4297, "step": 1123 }, { "epoch": 2.073868882733149, "grad_norm": 3.1540608406066895, "learning_rate": 3.3630000000000002e-06, "loss": 0.3744, "step": 1124 }, { "epoch": 2.0757156048014775, "grad_norm": 4.224101543426514, "learning_rate": 3.366e-06, "loss": 0.4036, 
"step": 1125 }, { "epoch": 2.0775623268698062, "grad_norm": 3.432368278503418, "learning_rate": 3.369e-06, "loss": 0.3774, "step": 1126 }, { "epoch": 2.079409048938135, "grad_norm": 4.711674690246582, "learning_rate": 3.372e-06, "loss": 0.3861, "step": 1127 }, { "epoch": 2.0812557710064636, "grad_norm": 4.5501909255981445, "learning_rate": 3.3750000000000003e-06, "loss": 0.4297, "step": 1128 }, { "epoch": 2.0831024930747923, "grad_norm": 8.730445861816406, "learning_rate": 3.378e-06, "loss": 0.3753, "step": 1129 }, { "epoch": 2.084949215143121, "grad_norm": 5.603017330169678, "learning_rate": 3.381e-06, "loss": 0.4272, "step": 1130 }, { "epoch": 2.0867959372114497, "grad_norm": 3.613166332244873, "learning_rate": 3.384e-06, "loss": 0.432, "step": 1131 }, { "epoch": 2.0886426592797784, "grad_norm": 4.397871494293213, "learning_rate": 3.387e-06, "loss": 0.4113, "step": 1132 }, { "epoch": 2.090489381348107, "grad_norm": 3.521243095397949, "learning_rate": 3.39e-06, "loss": 0.433, "step": 1133 }, { "epoch": 2.092336103416436, "grad_norm": 4.172756671905518, "learning_rate": 3.3930000000000004e-06, "loss": 0.4833, "step": 1134 }, { "epoch": 2.0941828254847645, "grad_norm": 6.483964920043945, "learning_rate": 3.3959999999999998e-06, "loss": 1.2138, "step": 1135 }, { "epoch": 2.0960295475530932, "grad_norm": 3.9152333736419678, "learning_rate": 3.399e-06, "loss": 0.9175, "step": 1136 }, { "epoch": 2.097876269621422, "grad_norm": 4.725359916687012, "learning_rate": 3.402e-06, "loss": 0.9543, "step": 1137 }, { "epoch": 2.0997229916897506, "grad_norm": 2.669206142425537, "learning_rate": 3.405e-06, "loss": 0.8489, "step": 1138 }, { "epoch": 2.1015697137580793, "grad_norm": 7.135278224945068, "learning_rate": 3.4080000000000002e-06, "loss": 0.8881, "step": 1139 }, { "epoch": 2.103416435826408, "grad_norm": 4.369724273681641, "learning_rate": 3.411e-06, "loss": 0.7659, "step": 1140 }, { "epoch": 2.1052631578947367, "grad_norm": 6.003192901611328, "learning_rate": 3.414e-06, 
"loss": 0.8159, "step": 1141 }, { "epoch": 2.1071098799630654, "grad_norm": 4.358618259429932, "learning_rate": 3.417e-06, "loss": 0.6835, "step": 1142 }, { "epoch": 2.108956602031394, "grad_norm": 5.704505920410156, "learning_rate": 3.4200000000000003e-06, "loss": 0.6636, "step": 1143 }, { "epoch": 2.110803324099723, "grad_norm": 5.818912506103516, "learning_rate": 3.4229999999999997e-06, "loss": 0.6394, "step": 1144 }, { "epoch": 2.1126500461680515, "grad_norm": 4.240537166595459, "learning_rate": 3.426e-06, "loss": 0.5845, "step": 1145 }, { "epoch": 2.11449676823638, "grad_norm": 4.361617088317871, "learning_rate": 3.429e-06, "loss": 0.5257, "step": 1146 }, { "epoch": 2.1163434903047094, "grad_norm": 5.02565860748291, "learning_rate": 3.4320000000000003e-06, "loss": 0.5765, "step": 1147 }, { "epoch": 2.118190212373038, "grad_norm": 2.9467175006866455, "learning_rate": 3.435e-06, "loss": 0.6254, "step": 1148 }, { "epoch": 2.1200369344413668, "grad_norm": 2.518874168395996, "learning_rate": 3.438e-06, "loss": 0.4371, "step": 1149 }, { "epoch": 2.1218836565096955, "grad_norm": 3.232793092727661, "learning_rate": 3.441e-06, "loss": 0.386, "step": 1150 }, { "epoch": 2.123730378578024, "grad_norm": 2.345193862915039, "learning_rate": 3.444e-06, "loss": 0.3993, "step": 1151 }, { "epoch": 2.125577100646353, "grad_norm": 4.542880535125732, "learning_rate": 3.447e-06, "loss": 0.4761, "step": 1152 }, { "epoch": 2.1274238227146816, "grad_norm": 5.272704124450684, "learning_rate": 3.4500000000000004e-06, "loss": 0.4417, "step": 1153 }, { "epoch": 2.1292705447830103, "grad_norm": 2.8965117931365967, "learning_rate": 3.453e-06, "loss": 0.409, "step": 1154 }, { "epoch": 2.131117266851339, "grad_norm": 15.167119026184082, "learning_rate": 3.456e-06, "loss": 0.3611, "step": 1155 }, { "epoch": 2.1329639889196677, "grad_norm": 5.197674751281738, "learning_rate": 3.4590000000000003e-06, "loss": 0.3397, "step": 1156 }, { "epoch": 2.1348107109879964, "grad_norm": 2.644599199295044, 
"learning_rate": 3.462e-06, "loss": 0.3809, "step": 1157 }, { "epoch": 2.136657433056325, "grad_norm": 14.982160568237305, "learning_rate": 3.4650000000000003e-06, "loss": 0.3453, "step": 1158 }, { "epoch": 2.1385041551246537, "grad_norm": 2.48482346534729, "learning_rate": 3.468e-06, "loss": 0.351, "step": 1159 }, { "epoch": 2.1403508771929824, "grad_norm": 2.524677038192749, "learning_rate": 3.471e-06, "loss": 0.306, "step": 1160 }, { "epoch": 2.142197599261311, "grad_norm": 4.211189270019531, "learning_rate": 3.474e-06, "loss": 0.3509, "step": 1161 }, { "epoch": 2.14404432132964, "grad_norm": 2.2646098136901855, "learning_rate": 3.4770000000000003e-06, "loss": 0.3004, "step": 1162 }, { "epoch": 2.1458910433979685, "grad_norm": 2.701345443725586, "learning_rate": 3.48e-06, "loss": 0.3862, "step": 1163 }, { "epoch": 2.1477377654662972, "grad_norm": 2.695441961288452, "learning_rate": 3.483e-06, "loss": 0.441, "step": 1164 }, { "epoch": 2.149584487534626, "grad_norm": 3.7059075832366943, "learning_rate": 3.486e-06, "loss": 0.3228, "step": 1165 }, { "epoch": 2.1514312096029546, "grad_norm": 5.615117073059082, "learning_rate": 3.489e-06, "loss": 0.4142, "step": 1166 }, { "epoch": 2.1532779316712833, "grad_norm": 3.990037441253662, "learning_rate": 3.492e-06, "loss": 0.3822, "step": 1167 }, { "epoch": 2.155124653739612, "grad_norm": 2.899252414703369, "learning_rate": 3.4950000000000004e-06, "loss": 0.3355, "step": 1168 }, { "epoch": 2.1569713758079407, "grad_norm": 3.141970634460449, "learning_rate": 3.498e-06, "loss": 0.3535, "step": 1169 }, { "epoch": 2.1588180978762694, "grad_norm": 2.840134620666504, "learning_rate": 3.501e-06, "loss": 0.3351, "step": 1170 }, { "epoch": 2.160664819944598, "grad_norm": 2.2682883739471436, "learning_rate": 3.5040000000000002e-06, "loss": 0.3443, "step": 1171 }, { "epoch": 2.1625115420129273, "grad_norm": 3.5608057975769043, "learning_rate": 3.507e-06, "loss": 0.3911, "step": 1172 }, { "epoch": 2.164358264081256, "grad_norm": 
3.4463419914245605, "learning_rate": 3.5100000000000003e-06, "loss": 0.3563, "step": 1173 }, { "epoch": 2.1662049861495847, "grad_norm": 3.2165534496307373, "learning_rate": 3.513e-06, "loss": 0.3671, "step": 1174 }, { "epoch": 2.1680517082179134, "grad_norm": 4.781904220581055, "learning_rate": 3.516e-06, "loss": 0.3282, "step": 1175 }, { "epoch": 2.169898430286242, "grad_norm": 22.736854553222656, "learning_rate": 3.519e-06, "loss": 0.375, "step": 1176 }, { "epoch": 2.1717451523545708, "grad_norm": 3.0178444385528564, "learning_rate": 3.5220000000000003e-06, "loss": 0.3934, "step": 1177 }, { "epoch": 2.1735918744228995, "grad_norm": 7.673983573913574, "learning_rate": 3.5249999999999997e-06, "loss": 0.3651, "step": 1178 }, { "epoch": 2.175438596491228, "grad_norm": 2.905626058578491, "learning_rate": 3.528e-06, "loss": 0.3587, "step": 1179 }, { "epoch": 2.177285318559557, "grad_norm": 5.361052989959717, "learning_rate": 3.531e-06, "loss": 0.3424, "step": 1180 }, { "epoch": 2.1791320406278856, "grad_norm": 2.9514048099517822, "learning_rate": 3.534e-06, "loss": 0.3506, "step": 1181 }, { "epoch": 2.1809787626962143, "grad_norm": 6.453454494476318, "learning_rate": 3.537e-06, "loss": 0.4183, "step": 1182 }, { "epoch": 2.182825484764543, "grad_norm": 4.925314903259277, "learning_rate": 3.54e-06, "loss": 0.4085, "step": 1183 }, { "epoch": 2.1846722068328717, "grad_norm": 3.5400233268737793, "learning_rate": 3.543e-06, "loss": 0.3729, "step": 1184 }, { "epoch": 2.1865189289012004, "grad_norm": 7.280455589294434, "learning_rate": 3.546e-06, "loss": 1.11, "step": 1185 }, { "epoch": 2.188365650969529, "grad_norm": 3.9851276874542236, "learning_rate": 3.5490000000000002e-06, "loss": 0.9891, "step": 1186 }, { "epoch": 2.1902123730378578, "grad_norm": 3.3686535358428955, "learning_rate": 3.552e-06, "loss": 0.907, "step": 1187 }, { "epoch": 2.1920590951061865, "grad_norm": 3.4856481552124023, "learning_rate": 3.555e-06, "loss": 0.7925, "step": 1188 }, { "epoch": 
2.193905817174515, "grad_norm": 3.7457706928253174, "learning_rate": 3.558e-06, "loss": 0.6853, "step": 1189 }, { "epoch": 2.195752539242844, "grad_norm": 5.772076606750488, "learning_rate": 3.5610000000000003e-06, "loss": 0.6675, "step": 1190 }, { "epoch": 2.1975992613111726, "grad_norm": 3.661681890487671, "learning_rate": 3.564e-06, "loss": 0.5446, "step": 1191 }, { "epoch": 2.1994459833795013, "grad_norm": 3.1582486629486084, "learning_rate": 3.5670000000000003e-06, "loss": 0.5409, "step": 1192 }, { "epoch": 2.20129270544783, "grad_norm": 3.120471954345703, "learning_rate": 3.57e-06, "loss": 0.5944, "step": 1193 }, { "epoch": 2.2031394275161587, "grad_norm": 3.0466837882995605, "learning_rate": 3.573e-06, "loss": 0.5486, "step": 1194 }, { "epoch": 2.2049861495844874, "grad_norm": 2.113574743270874, "learning_rate": 3.576e-06, "loss": 0.516, "step": 1195 }, { "epoch": 2.206832871652816, "grad_norm": 2.616821527481079, "learning_rate": 3.5790000000000004e-06, "loss": 0.5089, "step": 1196 }, { "epoch": 2.208679593721145, "grad_norm": 3.7034990787506104, "learning_rate": 3.582e-06, "loss": 0.4616, "step": 1197 }, { "epoch": 2.2105263157894735, "grad_norm": 3.5968942642211914, "learning_rate": 3.585e-06, "loss": 0.4608, "step": 1198 }, { "epoch": 2.2123730378578026, "grad_norm": 2.87996506690979, "learning_rate": 3.588e-06, "loss": 0.4004, "step": 1199 }, { "epoch": 2.2142197599261313, "grad_norm": 2.8459644317626953, "learning_rate": 3.591e-06, "loss": 0.4628, "step": 1200 }, { "epoch": 2.21606648199446, "grad_norm": 1.9606138467788696, "learning_rate": 3.5940000000000002e-06, "loss": 0.3203, "step": 1201 }, { "epoch": 2.2179132040627887, "grad_norm": 3.2116522789001465, "learning_rate": 3.5970000000000005e-06, "loss": 0.3311, "step": 1202 }, { "epoch": 2.2197599261311174, "grad_norm": 3.8338441848754883, "learning_rate": 3.6e-06, "loss": 0.3352, "step": 1203 }, { "epoch": 2.221606648199446, "grad_norm": 2.1496729850769043, "learning_rate": 3.603e-06, "loss": 
0.3138, "step": 1204 }, { "epoch": 2.223453370267775, "grad_norm": 4.74179220199585, "learning_rate": 3.6060000000000003e-06, "loss": 0.3463, "step": 1205 }, { "epoch": 2.2253000923361035, "grad_norm": 2.1020402908325195, "learning_rate": 3.609e-06, "loss": 0.3382, "step": 1206 }, { "epoch": 2.227146814404432, "grad_norm": 3.37388277053833, "learning_rate": 3.612e-06, "loss": 0.3619, "step": 1207 }, { "epoch": 2.228993536472761, "grad_norm": 3.114112377166748, "learning_rate": 3.615e-06, "loss": 0.2963, "step": 1208 }, { "epoch": 2.2308402585410896, "grad_norm": 15.57221794128418, "learning_rate": 3.618e-06, "loss": 0.3247, "step": 1209 }, { "epoch": 2.2326869806094183, "grad_norm": 3.1523525714874268, "learning_rate": 3.621e-06, "loss": 0.3519, "step": 1210 }, { "epoch": 2.234533702677747, "grad_norm": 4.5500688552856445, "learning_rate": 3.6240000000000004e-06, "loss": 0.3248, "step": 1211 }, { "epoch": 2.2363804247460757, "grad_norm": 3.338387966156006, "learning_rate": 3.6269999999999997e-06, "loss": 0.2886, "step": 1212 }, { "epoch": 2.2382271468144044, "grad_norm": 13.98320484161377, "learning_rate": 3.63e-06, "loss": 0.3965, "step": 1213 }, { "epoch": 2.240073868882733, "grad_norm": 2.9857637882232666, "learning_rate": 3.633e-06, "loss": 0.322, "step": 1214 }, { "epoch": 2.2419205909510618, "grad_norm": 2.754495143890381, "learning_rate": 3.636e-06, "loss": 0.3352, "step": 1215 }, { "epoch": 2.2437673130193905, "grad_norm": 3.092918634414673, "learning_rate": 3.6390000000000002e-06, "loss": 0.2727, "step": 1216 }, { "epoch": 2.245614035087719, "grad_norm": 2.6611902713775635, "learning_rate": 3.642e-06, "loss": 0.3195, "step": 1217 }, { "epoch": 2.247460757156048, "grad_norm": 5.46854305267334, "learning_rate": 3.645e-06, "loss": 0.3389, "step": 1218 }, { "epoch": 2.2493074792243766, "grad_norm": 3.306468963623047, "learning_rate": 3.648e-06, "loss": 0.3301, "step": 1219 }, { "epoch": 2.2511542012927053, "grad_norm": 5.192465782165527, "learning_rate": 
3.6510000000000003e-06, "loss": 0.2876, "step": 1220 }, { "epoch": 2.253000923361034, "grad_norm": 6.205870628356934, "learning_rate": 3.654e-06, "loss": 0.3413, "step": 1221 }, { "epoch": 2.254847645429363, "grad_norm": 5.976049900054932, "learning_rate": 3.657e-06, "loss": 0.3459, "step": 1222 }, { "epoch": 2.2566943674976914, "grad_norm": 4.196659564971924, "learning_rate": 3.66e-06, "loss": 0.3013, "step": 1223 }, { "epoch": 2.2585410895660205, "grad_norm": 4.1666107177734375, "learning_rate": 3.663e-06, "loss": 0.3416, "step": 1224 }, { "epoch": 2.260387811634349, "grad_norm": 6.647060394287109, "learning_rate": 3.666e-06, "loss": 0.3489, "step": 1225 }, { "epoch": 2.262234533702678, "grad_norm": 10.108695983886719, "learning_rate": 3.6690000000000004e-06, "loss": 0.3276, "step": 1226 }, { "epoch": 2.2640812557710066, "grad_norm": 3.9936745166778564, "learning_rate": 3.6719999999999997e-06, "loss": 0.3288, "step": 1227 }, { "epoch": 2.2659279778393353, "grad_norm": 3.6058974266052246, "learning_rate": 3.675e-06, "loss": 0.3284, "step": 1228 }, { "epoch": 2.267774699907664, "grad_norm": 9.84175968170166, "learning_rate": 3.678e-06, "loss": 0.3364, "step": 1229 }, { "epoch": 2.2696214219759927, "grad_norm": 4.596037864685059, "learning_rate": 3.681e-06, "loss": 0.348, "step": 1230 }, { "epoch": 2.2714681440443214, "grad_norm": 3.893502712249756, "learning_rate": 3.6840000000000002e-06, "loss": 0.3163, "step": 1231 }, { "epoch": 2.27331486611265, "grad_norm": 2.600052833557129, "learning_rate": 3.687e-06, "loss": 0.3415, "step": 1232 }, { "epoch": 2.275161588180979, "grad_norm": 3.8423099517822266, "learning_rate": 3.6900000000000002e-06, "loss": 0.3369, "step": 1233 }, { "epoch": 2.2770083102493075, "grad_norm": 5.047562599182129, "learning_rate": 3.693e-06, "loss": 0.3862, "step": 1234 }, { "epoch": 2.278855032317636, "grad_norm": 6.9906697273254395, "learning_rate": 3.6960000000000003e-06, "loss": 1.0494, "step": 1235 }, { "epoch": 2.280701754385965, 
"grad_norm": 4.055783748626709, "learning_rate": 3.6990000000000005e-06, "loss": 0.8679, "step": 1236 }, { "epoch": 2.2825484764542936, "grad_norm": 22.55494499206543, "learning_rate": 3.702e-06, "loss": 0.6641, "step": 1237 }, { "epoch": 2.2843951985226223, "grad_norm": 4.827858924865723, "learning_rate": 3.705e-06, "loss": 0.771, "step": 1238 }, { "epoch": 2.286241920590951, "grad_norm": 2.824592113494873, "learning_rate": 3.7080000000000003e-06, "loss": 0.5988, "step": 1239 }, { "epoch": 2.2880886426592797, "grad_norm": 2.9814367294311523, "learning_rate": 3.711e-06, "loss": 0.6778, "step": 1240 }, { "epoch": 2.2899353647276084, "grad_norm": 2.789565324783325, "learning_rate": 3.714e-06, "loss": 0.6152, "step": 1241 }, { "epoch": 2.291782086795937, "grad_norm": 7.15195369720459, "learning_rate": 3.717e-06, "loss": 0.6609, "step": 1242 }, { "epoch": 2.293628808864266, "grad_norm": 2.280348062515259, "learning_rate": 3.72e-06, "loss": 0.489, "step": 1243 }, { "epoch": 2.2954755309325945, "grad_norm": 2.8647119998931885, "learning_rate": 3.723e-06, "loss": 0.4645, "step": 1244 }, { "epoch": 2.297322253000923, "grad_norm": 2.9245121479034424, "learning_rate": 3.7260000000000004e-06, "loss": 0.5328, "step": 1245 }, { "epoch": 2.299168975069252, "grad_norm": 2.771186113357544, "learning_rate": 3.7289999999999998e-06, "loss": 0.4194, "step": 1246 }, { "epoch": 2.301015697137581, "grad_norm": 2.297076463699341, "learning_rate": 3.732e-06, "loss": 0.4229, "step": 1247 }, { "epoch": 2.3028624192059093, "grad_norm": 4.746479511260986, "learning_rate": 3.7350000000000002e-06, "loss": 0.3892, "step": 1248 }, { "epoch": 2.3047091412742384, "grad_norm": 4.564180374145508, "learning_rate": 3.738e-06, "loss": 0.2941, "step": 1249 }, { "epoch": 2.306555863342567, "grad_norm": 3.5963242053985596, "learning_rate": 3.7410000000000003e-06, "loss": 0.4098, "step": 1250 }, { "epoch": 2.308402585410896, "grad_norm": 3.23600435256958, "learning_rate": 3.744e-06, "loss": 0.3319, "step": 
1251 }, { "epoch": 2.3102493074792245, "grad_norm": 1.7888182401657104, "learning_rate": 3.747e-06, "loss": 0.279, "step": 1252 }, { "epoch": 2.312096029547553, "grad_norm": 2.327932357788086, "learning_rate": 3.75e-06, "loss": 0.3066, "step": 1253 }, { "epoch": 2.313942751615882, "grad_norm": 2.380089282989502, "learning_rate": 3.753e-06, "loss": 0.3174, "step": 1254 }, { "epoch": 2.3157894736842106, "grad_norm": 6.162848949432373, "learning_rate": 3.756e-06, "loss": 0.291, "step": 1255 }, { "epoch": 2.3176361957525393, "grad_norm": 2.3624391555786133, "learning_rate": 3.759e-06, "loss": 0.3356, "step": 1256 }, { "epoch": 2.319482917820868, "grad_norm": 2.3133296966552734, "learning_rate": 3.7620000000000006e-06, "loss": 0.2871, "step": 1257 }, { "epoch": 2.3213296398891967, "grad_norm": 2.8012380599975586, "learning_rate": 3.765e-06, "loss": 0.2981, "step": 1258 }, { "epoch": 2.3231763619575254, "grad_norm": 6.965078353881836, "learning_rate": 3.7679999999999998e-06, "loss": 0.2726, "step": 1259 }, { "epoch": 2.325023084025854, "grad_norm": 2.270150899887085, "learning_rate": 3.7710000000000004e-06, "loss": 0.3289, "step": 1260 }, { "epoch": 2.326869806094183, "grad_norm": 12.200182914733887, "learning_rate": 3.7739999999999998e-06, "loss": 0.308, "step": 1261 }, { "epoch": 2.3287165281625115, "grad_norm": 3.335463285446167, "learning_rate": 3.7770000000000004e-06, "loss": 0.2571, "step": 1262 }, { "epoch": 2.33056325023084, "grad_norm": 3.326897144317627, "learning_rate": 3.7800000000000002e-06, "loss": 0.275, "step": 1263 }, { "epoch": 2.332409972299169, "grad_norm": 2.531522274017334, "learning_rate": 3.7829999999999996e-06, "loss": 0.2572, "step": 1264 }, { "epoch": 2.3342566943674976, "grad_norm": 3.5164692401885986, "learning_rate": 3.7860000000000003e-06, "loss": 0.3005, "step": 1265 }, { "epoch": 2.3361034164358263, "grad_norm": 2.500251531600952, "learning_rate": 3.789e-06, "loss": 0.2758, "step": 1266 }, { "epoch": 2.337950138504155, "grad_norm": 
2.022705078125, "learning_rate": 3.7920000000000003e-06, "loss": 0.2581, "step": 1267 }, { "epoch": 2.3397968605724837, "grad_norm": 3.829115629196167, "learning_rate": 3.795e-06, "loss": 0.2543, "step": 1268 }, { "epoch": 2.3416435826408124, "grad_norm": 2.7531073093414307, "learning_rate": 3.798e-06, "loss": 0.2756, "step": 1269 }, { "epoch": 2.343490304709141, "grad_norm": 3.9547321796417236, "learning_rate": 3.801e-06, "loss": 0.2785, "step": 1270 }, { "epoch": 2.34533702677747, "grad_norm": 2.6992123126983643, "learning_rate": 3.804e-06, "loss": 0.308, "step": 1271 }, { "epoch": 2.347183748845799, "grad_norm": 3.0379865169525146, "learning_rate": 3.8070000000000006e-06, "loss": 0.2926, "step": 1272 }, { "epoch": 2.349030470914127, "grad_norm": 7.398136615753174, "learning_rate": 3.81e-06, "loss": 0.2887, "step": 1273 }, { "epoch": 2.3508771929824563, "grad_norm": 2.398902177810669, "learning_rate": 3.8129999999999997e-06, "loss": 0.2795, "step": 1274 }, { "epoch": 2.352723915050785, "grad_norm": 3.1875367164611816, "learning_rate": 3.816e-06, "loss": 0.2811, "step": 1275 }, { "epoch": 2.3545706371191137, "grad_norm": 5.333486080169678, "learning_rate": 3.819e-06, "loss": 0.2525, "step": 1276 }, { "epoch": 2.3564173591874424, "grad_norm": 3.6999690532684326, "learning_rate": 3.822000000000001e-06, "loss": 0.3073, "step": 1277 }, { "epoch": 2.358264081255771, "grad_norm": 9.801976203918457, "learning_rate": 3.825e-06, "loss": 0.2757, "step": 1278 }, { "epoch": 2.3601108033241, "grad_norm": 3.6513302326202393, "learning_rate": 3.828e-06, "loss": 0.2975, "step": 1279 }, { "epoch": 2.3619575253924285, "grad_norm": 19.606441497802734, "learning_rate": 3.831e-06, "loss": 0.3085, "step": 1280 }, { "epoch": 2.3638042474607572, "grad_norm": 3.0111238956451416, "learning_rate": 3.834e-06, "loss": 0.4162, "step": 1281 }, { "epoch": 2.365650969529086, "grad_norm": 3.498967170715332, "learning_rate": 3.837000000000001e-06, "loss": 0.2412, "step": 1282 }, { "epoch": 
2.3674976915974146, "grad_norm": 3.1680524349212646, "learning_rate": 3.8400000000000005e-06, "loss": 0.321, "step": 1283 }, { "epoch": 2.3693444136657433, "grad_norm": 3.2572083473205566, "learning_rate": 3.8429999999999995e-06, "loss": 0.3806, "step": 1284 }, { "epoch": 2.371191135734072, "grad_norm": 8.028724670410156, "learning_rate": 3.846e-06, "loss": 0.9676, "step": 1285 }, { "epoch": 2.3730378578024007, "grad_norm": 3.0075464248657227, "learning_rate": 3.849e-06, "loss": 0.7188, "step": 1286 }, { "epoch": 2.3748845798707294, "grad_norm": 3.325401544570923, "learning_rate": 3.852e-06, "loss": 0.7748, "step": 1287 }, { "epoch": 2.376731301939058, "grad_norm": 3.004544973373413, "learning_rate": 3.855e-06, "loss": 0.6705, "step": 1288 }, { "epoch": 2.378578024007387, "grad_norm": 5.643571853637695, "learning_rate": 3.858e-06, "loss": 0.648, "step": 1289 }, { "epoch": 2.3804247460757155, "grad_norm": 3.7997190952301025, "learning_rate": 3.861e-06, "loss": 0.6225, "step": 1290 }, { "epoch": 2.3822714681440442, "grad_norm": 3.801635265350342, "learning_rate": 3.864e-06, "loss": 0.5235, "step": 1291 }, { "epoch": 2.384118190212373, "grad_norm": 6.873345375061035, "learning_rate": 3.8669999999999996e-06, "loss": 0.5504, "step": 1292 }, { "epoch": 2.3859649122807016, "grad_norm": 30.787782669067383, "learning_rate": 3.87e-06, "loss": 0.6031, "step": 1293 }, { "epoch": 2.3878116343490303, "grad_norm": 2.551400661468506, "learning_rate": 3.873e-06, "loss": 0.4685, "step": 1294 }, { "epoch": 2.389658356417359, "grad_norm": 2.9074532985687256, "learning_rate": 3.876000000000001e-06, "loss": 0.4373, "step": 1295 }, { "epoch": 2.3915050784856877, "grad_norm": 2.8484601974487305, "learning_rate": 3.8790000000000005e-06, "loss": 0.4662, "step": 1296 }, { "epoch": 2.393351800554017, "grad_norm": 1.9514797925949097, "learning_rate": 3.8819999999999994e-06, "loss": 0.3518, "step": 1297 }, { "epoch": 2.395198522622345, "grad_norm": 2.5072360038757324, "learning_rate": 
3.885e-06, "loss": 0.3705, "step": 1298 }, { "epoch": 2.3970452446906743, "grad_norm": 3.407132863998413, "learning_rate": 3.888e-06, "loss": 0.4064, "step": 1299 }, { "epoch": 2.398891966759003, "grad_norm": 3.2116904258728027, "learning_rate": 3.8910000000000005e-06, "loss": 0.279, "step": 1300 }, { "epoch": 2.4007386888273317, "grad_norm": 2.4596047401428223, "learning_rate": 3.894e-06, "loss": 0.3032, "step": 1301 }, { "epoch": 2.4025854108956604, "grad_norm": 2.024303913116455, "learning_rate": 3.897e-06, "loss": 0.2755, "step": 1302 }, { "epoch": 2.404432132963989, "grad_norm": 2.66182017326355, "learning_rate": 3.9e-06, "loss": 0.367, "step": 1303 }, { "epoch": 2.4062788550323178, "grad_norm": 2.259300470352173, "learning_rate": 3.903e-06, "loss": 0.2667, "step": 1304 }, { "epoch": 2.4081255771006465, "grad_norm": 1.9543769359588623, "learning_rate": 3.906e-06, "loss": 0.2952, "step": 1305 }, { "epoch": 2.409972299168975, "grad_norm": 2.3687384128570557, "learning_rate": 3.909e-06, "loss": 0.2995, "step": 1306 }, { "epoch": 2.411819021237304, "grad_norm": 2.4152207374572754, "learning_rate": 3.912e-06, "loss": 0.2687, "step": 1307 }, { "epoch": 2.4136657433056325, "grad_norm": 2.8712761402130127, "learning_rate": 3.915000000000001e-06, "loss": 0.3216, "step": 1308 }, { "epoch": 2.4155124653739612, "grad_norm": 4.489912033081055, "learning_rate": 3.918e-06, "loss": 0.2717, "step": 1309 }, { "epoch": 2.41735918744229, "grad_norm": 5.658008575439453, "learning_rate": 3.921e-06, "loss": 0.2734, "step": 1310 }, { "epoch": 2.4192059095106186, "grad_norm": 4.131112575531006, "learning_rate": 3.924e-06, "loss": 0.2723, "step": 1311 }, { "epoch": 2.4210526315789473, "grad_norm": 2.3462915420532227, "learning_rate": 3.927e-06, "loss": 0.231, "step": 1312 }, { "epoch": 2.422899353647276, "grad_norm": 2.1342649459838867, "learning_rate": 3.9300000000000005e-06, "loss": 0.2303, "step": 1313 }, { "epoch": 2.4247460757156047, "grad_norm": 2.8530802726745605, 
"learning_rate": 3.933e-06, "loss": 0.2828, "step": 1314 }, { "epoch": 2.4265927977839334, "grad_norm": 5.358726978302002, "learning_rate": 3.936e-06, "loss": 0.2884, "step": 1315 }, { "epoch": 2.428439519852262, "grad_norm": 4.366053581237793, "learning_rate": 3.939e-06, "loss": 0.2985, "step": 1316 }, { "epoch": 2.430286241920591, "grad_norm": 2.7431342601776123, "learning_rate": 3.942e-06, "loss": 0.2599, "step": 1317 }, { "epoch": 2.4321329639889195, "grad_norm": 2.0079314708709717, "learning_rate": 3.945e-06, "loss": 0.2414, "step": 1318 }, { "epoch": 2.4339796860572482, "grad_norm": 2.379559278488159, "learning_rate": 3.948e-06, "loss": 0.2709, "step": 1319 }, { "epoch": 2.435826408125577, "grad_norm": 2.566490411758423, "learning_rate": 3.951000000000001e-06, "loss": 0.2579, "step": 1320 }, { "epoch": 2.4376731301939056, "grad_norm": 2.704746723175049, "learning_rate": 3.954e-06, "loss": 0.2446, "step": 1321 }, { "epoch": 2.439519852262235, "grad_norm": 2.497107744216919, "learning_rate": 3.9569999999999996e-06, "loss": 0.2526, "step": 1322 }, { "epoch": 2.441366574330563, "grad_norm": 2.8292629718780518, "learning_rate": 3.96e-06, "loss": 0.31, "step": 1323 }, { "epoch": 2.443213296398892, "grad_norm": 11.356720924377441, "learning_rate": 3.963e-06, "loss": 0.2827, "step": 1324 }, { "epoch": 2.445060018467221, "grad_norm": 2.888221263885498, "learning_rate": 3.966000000000001e-06, "loss": 0.2188, "step": 1325 }, { "epoch": 2.4469067405355496, "grad_norm": 2.1754820346832275, "learning_rate": 3.9690000000000005e-06, "loss": 0.3131, "step": 1326 }, { "epoch": 2.4487534626038783, "grad_norm": 3.7316973209381104, "learning_rate": 3.971999999999999e-06, "loss": 0.286, "step": 1327 }, { "epoch": 2.450600184672207, "grad_norm": 4.8265581130981445, "learning_rate": 3.975e-06, "loss": 0.3083, "step": 1328 }, { "epoch": 2.4524469067405357, "grad_norm": 2.3270061016082764, "learning_rate": 3.978e-06, "loss": 0.2119, "step": 1329 }, { "epoch": 2.4542936288088644, 
"grad_norm": 2.599588394165039, "learning_rate": 3.9810000000000005e-06, "loss": 0.2849, "step": 1330 }, { "epoch": 2.456140350877193, "grad_norm": 3.5899312496185303, "learning_rate": 3.984e-06, "loss": 0.3126, "step": 1331 }, { "epoch": 2.4579870729455218, "grad_norm": 5.132264137268066, "learning_rate": 3.987e-06, "loss": 0.3042, "step": 1332 }, { "epoch": 2.4598337950138505, "grad_norm": 2.8291397094726562, "learning_rate": 3.99e-06, "loss": 0.2809, "step": 1333 }, { "epoch": 2.461680517082179, "grad_norm": 3.450786828994751, "learning_rate": 3.993e-06, "loss": 0.3302, "step": 1334 }, { "epoch": 2.463527239150508, "grad_norm": 4.955879211425781, "learning_rate": 3.996e-06, "loss": 0.8375, "step": 1335 }, { "epoch": 2.4653739612188366, "grad_norm": 5.112767219543457, "learning_rate": 3.999e-06, "loss": 0.779, "step": 1336 }, { "epoch": 2.4672206832871653, "grad_norm": 3.635031223297119, "learning_rate": 4.002e-06, "loss": 0.6466, "step": 1337 }, { "epoch": 2.469067405355494, "grad_norm": 2.0046777725219727, "learning_rate": 4.005000000000001e-06, "loss": 0.7017, "step": 1338 }, { "epoch": 2.4709141274238227, "grad_norm": 3.8623645305633545, "learning_rate": 4.008e-06, "loss": 0.5682, "step": 1339 }, { "epoch": 2.4727608494921514, "grad_norm": 2.804150342941284, "learning_rate": 4.011e-06, "loss": 0.5209, "step": 1340 }, { "epoch": 2.47460757156048, "grad_norm": 2.1142044067382812, "learning_rate": 4.014e-06, "loss": 0.4553, "step": 1341 }, { "epoch": 2.4764542936288088, "grad_norm": 3.2303378582000732, "learning_rate": 4.017e-06, "loss": 0.5504, "step": 1342 }, { "epoch": 2.4783010156971375, "grad_norm": 2.541952610015869, "learning_rate": 4.0200000000000005e-06, "loss": 0.4207, "step": 1343 }, { "epoch": 2.480147737765466, "grad_norm": 7.729579925537109, "learning_rate": 4.023e-06, "loss": 0.4172, "step": 1344 }, { "epoch": 2.481994459833795, "grad_norm": 2.261575698852539, "learning_rate": 4.026000000000001e-06, "loss": 0.4869, "step": 1345 }, { "epoch": 
2.4838411819021236, "grad_norm": 3.2447593212127686, "learning_rate": 4.029e-06, "loss": 0.4203, "step": 1346 }, { "epoch": 2.4856879039704527, "grad_norm": 2.228067398071289, "learning_rate": 4.032e-06, "loss": 0.36, "step": 1347 }, { "epoch": 2.487534626038781, "grad_norm": 2.548985719680786, "learning_rate": 4.035e-06, "loss": 0.3544, "step": 1348 }, { "epoch": 2.48938134810711, "grad_norm": 2.606369733810425, "learning_rate": 4.038e-06, "loss": 0.3304, "step": 1349 }, { "epoch": 2.4912280701754383, "grad_norm": 2.1755919456481934, "learning_rate": 4.041e-06, "loss": 0.3204, "step": 1350 }, { "epoch": 2.4930747922437675, "grad_norm": 2.264122724533081, "learning_rate": 4.044000000000001e-06, "loss": 0.2592, "step": 1351 }, { "epoch": 2.494921514312096, "grad_norm": 2.880892515182495, "learning_rate": 4.0469999999999995e-06, "loss": 0.2682, "step": 1352 }, { "epoch": 2.496768236380425, "grad_norm": 2.182246685028076, "learning_rate": 4.05e-06, "loss": 0.2418, "step": 1353 }, { "epoch": 2.4986149584487536, "grad_norm": 1.7105096578598022, "learning_rate": 4.053e-06, "loss": 0.2489, "step": 1354 }, { "epoch": 2.5004616805170823, "grad_norm": 4.293247222900391, "learning_rate": 4.056e-06, "loss": 0.2585, "step": 1355 }, { "epoch": 2.502308402585411, "grad_norm": 1.896207332611084, "learning_rate": 4.0590000000000004e-06, "loss": 0.2307, "step": 1356 }, { "epoch": 2.5041551246537397, "grad_norm": 3.0287654399871826, "learning_rate": 4.062e-06, "loss": 0.2302, "step": 1357 }, { "epoch": 2.5060018467220684, "grad_norm": 1.682420015335083, "learning_rate": 4.065e-06, "loss": 0.2106, "step": 1358 }, { "epoch": 2.507848568790397, "grad_norm": 2.477034330368042, "learning_rate": 4.068e-06, "loss": 0.2265, "step": 1359 }, { "epoch": 2.509695290858726, "grad_norm": 2.927973508834839, "learning_rate": 4.071e-06, "loss": 0.2257, "step": 1360 }, { "epoch": 2.5115420129270545, "grad_norm": 2.208219051361084, "learning_rate": 4.074e-06, "loss": 0.2496, "step": 1361 }, { "epoch": 
2.513388734995383, "grad_norm": 2.2488653659820557, "learning_rate": 4.077e-06, "loss": 0.2835, "step": 1362 }, { "epoch": 2.515235457063712, "grad_norm": 2.399538993835449, "learning_rate": 4.080000000000001e-06, "loss": 0.2286, "step": 1363 }, { "epoch": 2.5170821791320406, "grad_norm": 3.4305288791656494, "learning_rate": 4.083e-06, "loss": 0.2554, "step": 1364 }, { "epoch": 2.5189289012003693, "grad_norm": 2.3506622314453125, "learning_rate": 4.0859999999999995e-06, "loss": 0.3603, "step": 1365 }, { "epoch": 2.520775623268698, "grad_norm": 2.6317431926727295, "learning_rate": 4.089e-06, "loss": 0.2436, "step": 1366 }, { "epoch": 2.5226223453370267, "grad_norm": 3.0093767642974854, "learning_rate": 4.092e-06, "loss": 0.2334, "step": 1367 }, { "epoch": 2.5244690674053554, "grad_norm": 3.368942975997925, "learning_rate": 4.095000000000001e-06, "loss": 0.208, "step": 1368 }, { "epoch": 2.526315789473684, "grad_norm": 2.1555802822113037, "learning_rate": 4.098e-06, "loss": 0.2505, "step": 1369 }, { "epoch": 2.5281625115420128, "grad_norm": 2.5781638622283936, "learning_rate": 4.100999999999999e-06, "loss": 0.219, "step": 1370 }, { "epoch": 2.5300092336103415, "grad_norm": 4.542846202850342, "learning_rate": 4.104e-06, "loss": 0.2179, "step": 1371 }, { "epoch": 2.5318559556786706, "grad_norm": 1.7584036588668823, "learning_rate": 4.107e-06, "loss": 0.1994, "step": 1372 }, { "epoch": 2.533702677746999, "grad_norm": 1.8268120288848877, "learning_rate": 4.1100000000000005e-06, "loss": 0.2002, "step": 1373 }, { "epoch": 2.535549399815328, "grad_norm": 2.2652413845062256, "learning_rate": 4.113e-06, "loss": 0.2568, "step": 1374 }, { "epoch": 2.5373961218836563, "grad_norm": 4.071157932281494, "learning_rate": 4.116e-06, "loss": 0.2373, "step": 1375 }, { "epoch": 2.5392428439519854, "grad_norm": 2.830423355102539, "learning_rate": 4.119e-06, "loss": 0.1984, "step": 1376 }, { "epoch": 2.541089566020314, "grad_norm": 2.961411237716675, "learning_rate": 4.122e-06, "loss": 
0.2728, "step": 1377 }, { "epoch": 2.542936288088643, "grad_norm": 2.2902283668518066, "learning_rate": 4.125e-06, "loss": 0.2201, "step": 1378 }, { "epoch": 2.5447830101569715, "grad_norm": 2.1575589179992676, "learning_rate": 4.128e-06, "loss": 0.206, "step": 1379 }, { "epoch": 2.5466297322253, "grad_norm": 2.122411012649536, "learning_rate": 4.131e-06, "loss": 0.2204, "step": 1380 }, { "epoch": 2.548476454293629, "grad_norm": 2.2870776653289795, "learning_rate": 4.1340000000000006e-06, "loss": 0.267, "step": 1381 }, { "epoch": 2.5503231763619576, "grad_norm": 2.6069374084472656, "learning_rate": 4.137e-06, "loss": 0.2183, "step": 1382 }, { "epoch": 2.5521698984302863, "grad_norm": 2.5496888160705566, "learning_rate": 4.14e-06, "loss": 0.2969, "step": 1383 }, { "epoch": 2.554016620498615, "grad_norm": 4.724157333374023, "learning_rate": 4.143e-06, "loss": 0.253, "step": 1384 }, { "epoch": 2.5558633425669437, "grad_norm": 6.4206390380859375, "learning_rate": 4.146e-06, "loss": 0.8273, "step": 1385 }, { "epoch": 2.5577100646352724, "grad_norm": 6.399569511413574, "learning_rate": 4.1490000000000004e-06, "loss": 0.857, "step": 1386 }, { "epoch": 2.559556786703601, "grad_norm": 1.8726593255996704, "learning_rate": 4.152e-06, "loss": 0.5886, "step": 1387 }, { "epoch": 2.56140350877193, "grad_norm": 3.172929525375366, "learning_rate": 4.155000000000001e-06, "loss": 0.5749, "step": 1388 }, { "epoch": 2.5632502308402585, "grad_norm": 2.1683387756347656, "learning_rate": 4.158e-06, "loss": 0.5789, "step": 1389 }, { "epoch": 2.565096952908587, "grad_norm": 2.343235969543457, "learning_rate": 4.161e-06, "loss": 0.597, "step": 1390 }, { "epoch": 2.566943674976916, "grad_norm": 2.232675552368164, "learning_rate": 4.164e-06, "loss": 0.5261, "step": 1391 }, { "epoch": 2.5687903970452446, "grad_norm": 1.9980791807174683, "learning_rate": 4.167e-06, "loss": 0.4831, "step": 1392 }, { "epoch": 2.5706371191135733, "grad_norm": 2.491255521774292, "learning_rate": 
4.170000000000001e-06, "loss": 0.5287, "step": 1393 }, { "epoch": 2.572483841181902, "grad_norm": 1.8359003067016602, "learning_rate": 4.1730000000000005e-06, "loss": 0.384, "step": 1394 }, { "epoch": 2.5743305632502307, "grad_norm": 2.0215978622436523, "learning_rate": 4.1759999999999995e-06, "loss": 0.3815, "step": 1395 }, { "epoch": 2.5761772853185594, "grad_norm": 1.9105162620544434, "learning_rate": 4.179e-06, "loss": 0.4254, "step": 1396 }, { "epoch": 2.5780240073868885, "grad_norm": 5.220210552215576, "learning_rate": 4.182e-06, "loss": 0.4206, "step": 1397 }, { "epoch": 2.579870729455217, "grad_norm": 1.760629653930664, "learning_rate": 4.185000000000001e-06, "loss": 0.2673, "step": 1398 }, { "epoch": 2.581717451523546, "grad_norm": 2.2482142448425293, "learning_rate": 4.188e-06, "loss": 0.271, "step": 1399 }, { "epoch": 2.583564173591874, "grad_norm": 1.5858904123306274, "learning_rate": 4.191e-06, "loss": 0.277, "step": 1400 }, { "epoch": 2.5854108956602033, "grad_norm": 2.4095640182495117, "learning_rate": 4.194e-06, "loss": 0.1968, "step": 1401 }, { "epoch": 2.587257617728532, "grad_norm": 1.7928316593170166, "learning_rate": 4.197e-06, "loss": 0.2765, "step": 1402 }, { "epoch": 2.5891043397968607, "grad_norm": 2.0820446014404297, "learning_rate": 4.2000000000000004e-06, "loss": 0.2139, "step": 1403 }, { "epoch": 2.5909510618651894, "grad_norm": 1.5978951454162598, "learning_rate": 4.203e-06, "loss": 0.1674, "step": 1404 }, { "epoch": 2.592797783933518, "grad_norm": 1.8610577583312988, "learning_rate": 4.206e-06, "loss": 0.1904, "step": 1405 }, { "epoch": 2.594644506001847, "grad_norm": 2.742856740951538, "learning_rate": 4.209000000000001e-06, "loss": 0.2835, "step": 1406 }, { "epoch": 2.5964912280701755, "grad_norm": 1.738468885421753, "learning_rate": 4.212e-06, "loss": 0.1867, "step": 1407 }, { "epoch": 2.598337950138504, "grad_norm": 7.230634689331055, "learning_rate": 4.215e-06, "loss": 0.231, "step": 1408 }, { "epoch": 2.600184672206833, 
"grad_norm": 2.0607056617736816, "learning_rate": 4.218e-06, "loss": 0.268, "step": 1409 }, { "epoch": 2.6020313942751616, "grad_norm": 2.3839364051818848, "learning_rate": 4.221e-06, "loss": 0.2136, "step": 1410 }, { "epoch": 2.6038781163434903, "grad_norm": 3.0196101665496826, "learning_rate": 4.2240000000000006e-06, "loss": 0.1609, "step": 1411 }, { "epoch": 2.605724838411819, "grad_norm": 2.1394782066345215, "learning_rate": 4.227e-06, "loss": 0.3409, "step": 1412 }, { "epoch": 2.6075715604801477, "grad_norm": 2.8312456607818604, "learning_rate": 4.229999999999999e-06, "loss": 0.2025, "step": 1413 }, { "epoch": 2.6094182825484764, "grad_norm": 2.4542574882507324, "learning_rate": 4.233e-06, "loss": 0.1856, "step": 1414 }, { "epoch": 2.611265004616805, "grad_norm": 3.183842658996582, "learning_rate": 4.236e-06, "loss": 0.228, "step": 1415 }, { "epoch": 2.613111726685134, "grad_norm": 2.6048052310943604, "learning_rate": 4.239e-06, "loss": 0.2479, "step": 1416 }, { "epoch": 2.6149584487534625, "grad_norm": 2.054537773132324, "learning_rate": 4.242e-06, "loss": 0.2015, "step": 1417 }, { "epoch": 2.616805170821791, "grad_norm": 2.2461042404174805, "learning_rate": 4.245e-06, "loss": 0.2257, "step": 1418 }, { "epoch": 2.61865189289012, "grad_norm": 2.653735876083374, "learning_rate": 4.248e-06, "loss": 0.2595, "step": 1419 }, { "epoch": 2.6204986149584486, "grad_norm": 1.8901677131652832, "learning_rate": 4.251e-06, "loss": 0.1943, "step": 1420 }, { "epoch": 2.6223453370267773, "grad_norm": 2.2802734375, "learning_rate": 4.254e-06, "loss": 0.2195, "step": 1421 }, { "epoch": 2.6241920590951064, "grad_norm": 2.666233539581299, "learning_rate": 4.257e-06, "loss": 0.3164, "step": 1422 }, { "epoch": 2.6260387811634347, "grad_norm": 3.170790195465088, "learning_rate": 4.26e-06, "loss": 0.221, "step": 1423 }, { "epoch": 2.627885503231764, "grad_norm": 1.983960747718811, "learning_rate": 4.2630000000000005e-06, "loss": 0.2229, "step": 1424 }, { "epoch": 2.629732225300092, 
"grad_norm": 2.3078384399414062, "learning_rate": 4.266e-06, "loss": 0.2237, "step": 1425 }, { "epoch": 2.6315789473684212, "grad_norm": 1.8761299848556519, "learning_rate": 4.269e-06, "loss": 0.2458, "step": 1426 }, { "epoch": 2.6334256694367495, "grad_norm": 3.2777647972106934, "learning_rate": 4.272e-06, "loss": 0.2519, "step": 1427 }, { "epoch": 2.6352723915050786, "grad_norm": 2.3433635234832764, "learning_rate": 4.275e-06, "loss": 0.2276, "step": 1428 }, { "epoch": 2.6371191135734073, "grad_norm": 3.0046474933624268, "learning_rate": 4.278e-06, "loss": 0.215, "step": 1429 }, { "epoch": 2.638965835641736, "grad_norm": 2.6063246726989746, "learning_rate": 4.281e-06, "loss": 0.2128, "step": 1430 }, { "epoch": 2.6408125577100647, "grad_norm": 2.668186902999878, "learning_rate": 4.284000000000001e-06, "loss": 0.2393, "step": 1431 }, { "epoch": 2.6426592797783934, "grad_norm": 2.6497902870178223, "learning_rate": 4.287e-06, "loss": 0.2509, "step": 1432 }, { "epoch": 2.644506001846722, "grad_norm": 2.981414556503296, "learning_rate": 4.29e-06, "loss": 0.2447, "step": 1433 }, { "epoch": 2.646352723915051, "grad_norm": 2.751962900161743, "learning_rate": 4.293e-06, "loss": 0.2624, "step": 1434 }, { "epoch": 2.6481994459833795, "grad_norm": 4.448115825653076, "learning_rate": 4.296e-06, "loss": 0.8055, "step": 1435 }, { "epoch": 2.6500461680517082, "grad_norm": 2.5063092708587646, "learning_rate": 4.299000000000001e-06, "loss": 0.7689, "step": 1436 }, { "epoch": 2.651892890120037, "grad_norm": 2.950172185897827, "learning_rate": 4.3020000000000005e-06, "loss": 0.6302, "step": 1437 }, { "epoch": 2.6537396121883656, "grad_norm": 3.074154853820801, "learning_rate": 4.3049999999999994e-06, "loss": 0.5561, "step": 1438 }, { "epoch": 2.6555863342566943, "grad_norm": 2.672722101211548, "learning_rate": 4.308e-06, "loss": 0.605, "step": 1439 }, { "epoch": 2.657433056325023, "grad_norm": 1.9718472957611084, "learning_rate": 4.311e-06, "loss": 0.4401, "step": 1440 }, { "epoch": 
2.6592797783933517, "grad_norm": 1.7161527872085571, "learning_rate": 4.3140000000000005e-06, "loss": 0.4255, "step": 1441 }, { "epoch": 2.6611265004616804, "grad_norm": 2.125276803970337, "learning_rate": 4.317e-06, "loss": 0.4338, "step": 1442 }, { "epoch": 2.662973222530009, "grad_norm": 2.82174015045166, "learning_rate": 4.32e-06, "loss": 0.4755, "step": 1443 }, { "epoch": 2.664819944598338, "grad_norm": 1.9432512521743774, "learning_rate": 4.323e-06, "loss": 0.5066, "step": 1444 }, { "epoch": 2.6666666666666665, "grad_norm": 1.7846685647964478, "learning_rate": 4.326e-06, "loss": 0.3927, "step": 1445 }, { "epoch": 2.668513388734995, "grad_norm": 2.2068796157836914, "learning_rate": 4.329e-06, "loss": 0.3077, "step": 1446 }, { "epoch": 2.6703601108033244, "grad_norm": 2.306232213973999, "learning_rate": 4.332e-06, "loss": 0.3395, "step": 1447 }, { "epoch": 2.6722068328716526, "grad_norm": 2.3692593574523926, "learning_rate": 4.335e-06, "loss": 0.2894, "step": 1448 }, { "epoch": 2.6740535549399818, "grad_norm": 1.493941307067871, "learning_rate": 4.338000000000001e-06, "loss": 0.3051, "step": 1449 }, { "epoch": 2.67590027700831, "grad_norm": 2.4704809188842773, "learning_rate": 4.341e-06, "loss": 0.3822, "step": 1450 }, { "epoch": 2.677746999076639, "grad_norm": 2.337467670440674, "learning_rate": 4.344e-06, "loss": 0.2224, "step": 1451 }, { "epoch": 2.6795937211449674, "grad_norm": 1.7623108625411987, "learning_rate": 4.347e-06, "loss": 0.1888, "step": 1452 }, { "epoch": 2.6814404432132966, "grad_norm": 3.122779369354248, "learning_rate": 4.35e-06, "loss": 0.2508, "step": 1453 }, { "epoch": 2.6832871652816253, "grad_norm": 2.7472877502441406, "learning_rate": 4.3530000000000005e-06, "loss": 0.1959, "step": 1454 }, { "epoch": 2.685133887349954, "grad_norm": 1.7916723489761353, "learning_rate": 4.356e-06, "loss": 0.193, "step": 1455 }, { "epoch": 2.6869806094182827, "grad_norm": 1.9161310195922852, "learning_rate": 4.359e-06, "loss": 0.2094, "step": 1456 }, { 
"epoch": 2.6888273314866113, "grad_norm": 1.7572247982025146, "learning_rate": 4.362e-06, "loss": 0.1904, "step": 1457 }, { "epoch": 2.69067405355494, "grad_norm": 1.8705005645751953, "learning_rate": 4.365e-06, "loss": 0.1926, "step": 1458 }, { "epoch": 2.6925207756232687, "grad_norm": 4.131222724914551, "learning_rate": 4.368e-06, "loss": 0.1821, "step": 1459 }, { "epoch": 2.6943674976915974, "grad_norm": 1.7637590169906616, "learning_rate": 4.371e-06, "loss": 0.2116, "step": 1460 }, { "epoch": 2.696214219759926, "grad_norm": 2.1233274936676025, "learning_rate": 4.374000000000001e-06, "loss": 0.1705, "step": 1461 }, { "epoch": 2.698060941828255, "grad_norm": 2.2717113494873047, "learning_rate": 4.377e-06, "loss": 0.1745, "step": 1462 }, { "epoch": 2.6999076638965835, "grad_norm": 1.8533835411071777, "learning_rate": 4.3799999999999996e-06, "loss": 0.211, "step": 1463 }, { "epoch": 2.7017543859649122, "grad_norm": 1.4384863376617432, "learning_rate": 4.383e-06, "loss": 0.1767, "step": 1464 }, { "epoch": 2.703601108033241, "grad_norm": 4.695356369018555, "learning_rate": 4.386e-06, "loss": 0.2391, "step": 1465 }, { "epoch": 2.7054478301015696, "grad_norm": 1.8919119834899902, "learning_rate": 4.389000000000001e-06, "loss": 0.2206, "step": 1466 }, { "epoch": 2.7072945521698983, "grad_norm": 2.0214920043945312, "learning_rate": 4.3920000000000005e-06, "loss": 0.2006, "step": 1467 }, { "epoch": 2.709141274238227, "grad_norm": 2.4652929306030273, "learning_rate": 4.395e-06, "loss": 0.1902, "step": 1468 }, { "epoch": 2.7109879963065557, "grad_norm": 2.2650187015533447, "learning_rate": 4.398e-06, "loss": 0.1999, "step": 1469 }, { "epoch": 2.7128347183748844, "grad_norm": 2.400144338607788, "learning_rate": 4.401e-06, "loss": 0.2368, "step": 1470 }, { "epoch": 2.714681440443213, "grad_norm": 2.085756540298462, "learning_rate": 4.4040000000000005e-06, "loss": 0.2032, "step": 1471 }, { "epoch": 2.7165281625115423, "grad_norm": 3.604992628097534, "learning_rate": 4.407e-06, 
"loss": 0.2296, "step": 1472 }, { "epoch": 2.7183748845798705, "grad_norm": 1.75570547580719, "learning_rate": 4.41e-06, "loss": 0.1673, "step": 1473 }, { "epoch": 2.7202216066481997, "grad_norm": 2.396017551422119, "learning_rate": 4.413000000000001e-06, "loss": 0.1703, "step": 1474 }, { "epoch": 2.722068328716528, "grad_norm": 2.6366143226623535, "learning_rate": 4.416e-06, "loss": 0.2345, "step": 1475 }, { "epoch": 2.723915050784857, "grad_norm": 1.9363056421279907, "learning_rate": 4.4189999999999995e-06, "loss": 0.1987, "step": 1476 }, { "epoch": 2.7257617728531853, "grad_norm": 2.850567102432251, "learning_rate": 4.422e-06, "loss": 0.1939, "step": 1477 }, { "epoch": 2.7276084949215145, "grad_norm": 2.088304281234741, "learning_rate": 4.425e-06, "loss": 0.2129, "step": 1478 }, { "epoch": 2.729455216989843, "grad_norm": 2.7488322257995605, "learning_rate": 4.428000000000001e-06, "loss": 0.2247, "step": 1479 }, { "epoch": 2.731301939058172, "grad_norm": 3.1374897956848145, "learning_rate": 4.4310000000000004e-06, "loss": 0.2246, "step": 1480 }, { "epoch": 2.7331486611265006, "grad_norm": 2.563474416732788, "learning_rate": 4.433999999999999e-06, "loss": 0.2151, "step": 1481 }, { "epoch": 2.7349953831948293, "grad_norm": 2.290768623352051, "learning_rate": 4.437e-06, "loss": 0.2009, "step": 1482 }, { "epoch": 2.736842105263158, "grad_norm": 3.419640302658081, "learning_rate": 4.44e-06, "loss": 0.2311, "step": 1483 }, { "epoch": 2.7386888273314867, "grad_norm": 3.651583671569824, "learning_rate": 4.4430000000000005e-06, "loss": 0.303, "step": 1484 }, { "epoch": 2.7405355493998154, "grad_norm": 2.9224209785461426, "learning_rate": 4.446e-06, "loss": 0.6854, "step": 1485 }, { "epoch": 2.742382271468144, "grad_norm": 2.3546254634857178, "learning_rate": 4.449e-06, "loss": 0.6776, "step": 1486 }, { "epoch": 2.7442289935364728, "grad_norm": 2.4229791164398193, "learning_rate": 4.452e-06, "loss": 0.495, "step": 1487 }, { "epoch": 2.7460757156048015, "grad_norm": 
2.2081387042999268, "learning_rate": 4.455e-06, "loss": 0.488, "step": 1488 }, { "epoch": 2.74792243767313, "grad_norm": 1.6628825664520264, "learning_rate": 4.458e-06, "loss": 0.4709, "step": 1489 }, { "epoch": 2.749769159741459, "grad_norm": 2.055128335952759, "learning_rate": 4.461e-06, "loss": 0.4067, "step": 1490 }, { "epoch": 2.7516158818097876, "grad_norm": 4.5744194984436035, "learning_rate": 4.464e-06, "loss": 0.3766, "step": 1491 }, { "epoch": 2.7534626038781163, "grad_norm": 2.0984883308410645, "learning_rate": 4.467000000000001e-06, "loss": 0.4352, "step": 1492 }, { "epoch": 2.755309325946445, "grad_norm": 1.9840891361236572, "learning_rate": 4.4699999999999996e-06, "loss": 0.42, "step": 1493 }, { "epoch": 2.7571560480147737, "grad_norm": 1.4575275182724, "learning_rate": 4.473e-06, "loss": 0.3601, "step": 1494 }, { "epoch": 2.7590027700831024, "grad_norm": 2.2698862552642822, "learning_rate": 4.476e-06, "loss": 0.3062, "step": 1495 }, { "epoch": 2.760849492151431, "grad_norm": 2.3743550777435303, "learning_rate": 4.479e-06, "loss": 0.4122, "step": 1496 }, { "epoch": 2.76269621421976, "grad_norm": 2.3653502464294434, "learning_rate": 4.4820000000000005e-06, "loss": 0.446, "step": 1497 }, { "epoch": 2.7645429362880884, "grad_norm": 1.8148143291473389, "learning_rate": 4.485e-06, "loss": 0.3055, "step": 1498 }, { "epoch": 2.7663896583564176, "grad_norm": 1.6854664087295532, "learning_rate": 4.488e-06, "loss": 0.2572, "step": 1499 }, { "epoch": 2.768236380424746, "grad_norm": 2.335649013519287, "learning_rate": 4.491e-06, "loss": 0.2348, "step": 1500 }, { "epoch": 2.770083102493075, "grad_norm": 2.0741183757781982, "learning_rate": 4.494e-06, "loss": 0.2342, "step": 1501 }, { "epoch": 2.7719298245614032, "grad_norm": 1.6792997121810913, "learning_rate": 4.497e-06, "loss": 0.1631, "step": 1502 }, { "epoch": 2.7737765466297324, "grad_norm": 2.1965067386627197, "learning_rate": 4.5e-06, "loss": 0.2065, "step": 1503 }, { "epoch": 2.775623268698061, 
"grad_norm": 3.105736017227173, "learning_rate": 4.503000000000001e-06, "loss": 0.1888, "step": 1504 }, { "epoch": 2.77746999076639, "grad_norm": 1.6356396675109863, "learning_rate": 4.506e-06, "loss": 0.2168, "step": 1505 }, { "epoch": 2.7793167128347185, "grad_norm": 1.6595216989517212, "learning_rate": 4.5089999999999995e-06, "loss": 0.1784, "step": 1506 }, { "epoch": 2.781163434903047, "grad_norm": 1.616804599761963, "learning_rate": 4.512e-06, "loss": 0.193, "step": 1507 }, { "epoch": 2.783010156971376, "grad_norm": 2.185166597366333, "learning_rate": 4.515e-06, "loss": 0.2007, "step": 1508 }, { "epoch": 2.7848568790397046, "grad_norm": 1.600041151046753, "learning_rate": 4.518000000000001e-06, "loss": 0.1807, "step": 1509 }, { "epoch": 2.7867036011080333, "grad_norm": 2.245534896850586, "learning_rate": 4.521e-06, "loss": 0.1933, "step": 1510 }, { "epoch": 2.788550323176362, "grad_norm": 2.0908350944519043, "learning_rate": 4.524e-06, "loss": 0.1914, "step": 1511 }, { "epoch": 2.7903970452446907, "grad_norm": 1.715218424797058, "learning_rate": 4.527e-06, "loss": 0.175, "step": 1512 }, { "epoch": 2.7922437673130194, "grad_norm": 1.8380025625228882, "learning_rate": 4.53e-06, "loss": 0.1799, "step": 1513 }, { "epoch": 2.794090489381348, "grad_norm": 3.243588447570801, "learning_rate": 4.5330000000000005e-06, "loss": 0.183, "step": 1514 }, { "epoch": 2.7959372114496768, "grad_norm": 2.1115996837615967, "learning_rate": 4.536e-06, "loss": 0.1827, "step": 1515 }, { "epoch": 2.7977839335180055, "grad_norm": 3.3165128231048584, "learning_rate": 4.539e-06, "loss": 0.1978, "step": 1516 }, { "epoch": 2.799630655586334, "grad_norm": 1.9535701274871826, "learning_rate": 4.542000000000001e-06, "loss": 0.1685, "step": 1517 }, { "epoch": 2.801477377654663, "grad_norm": 1.7053006887435913, "learning_rate": 4.545e-06, "loss": 0.1515, "step": 1518 }, { "epoch": 2.8033240997229916, "grad_norm": 2.9324450492858887, "learning_rate": 4.548e-06, "loss": 0.1939, "step": 1519 }, { 
"epoch": 2.8051708217913203, "grad_norm": 1.7410835027694702, "learning_rate": 4.551e-06, "loss": 0.1666, "step": 1520 }, { "epoch": 2.807017543859649, "grad_norm": 1.9593206644058228, "learning_rate": 4.554e-06, "loss": 0.1814, "step": 1521 }, { "epoch": 2.808864265927978, "grad_norm": 6.398316383361816, "learning_rate": 4.557000000000001e-06, "loss": 0.2177, "step": 1522 }, { "epoch": 2.8107109879963064, "grad_norm": 2.2251827716827393, "learning_rate": 4.56e-06, "loss": 0.2002, "step": 1523 }, { "epoch": 2.8125577100646355, "grad_norm": 2.4644134044647217, "learning_rate": 4.563e-06, "loss": 0.1882, "step": 1524 }, { "epoch": 2.8144044321329638, "grad_norm": 2.2142317295074463, "learning_rate": 4.566e-06, "loss": 0.1932, "step": 1525 }, { "epoch": 2.816251154201293, "grad_norm": 3.522850751876831, "learning_rate": 4.569e-06, "loss": 0.1872, "step": 1526 }, { "epoch": 2.818097876269621, "grad_norm": 1.950206995010376, "learning_rate": 4.5720000000000004e-06, "loss": 0.1613, "step": 1527 }, { "epoch": 2.8199445983379503, "grad_norm": 1.9772347211837769, "learning_rate": 4.575e-06, "loss": 0.1761, "step": 1528 }, { "epoch": 2.821791320406279, "grad_norm": 2.442274570465088, "learning_rate": 4.578000000000001e-06, "loss": 0.186, "step": 1529 }, { "epoch": 2.8236380424746077, "grad_norm": 2.1009490489959717, "learning_rate": 4.581e-06, "loss": 0.1791, "step": 1530 }, { "epoch": 2.8254847645429364, "grad_norm": 3.239732027053833, "learning_rate": 4.584e-06, "loss": 0.2082, "step": 1531 }, { "epoch": 2.827331486611265, "grad_norm": 2.345767021179199, "learning_rate": 4.587e-06, "loss": 0.1945, "step": 1532 }, { "epoch": 2.829178208679594, "grad_norm": 3.2505600452423096, "learning_rate": 4.59e-06, "loss": 0.2387, "step": 1533 }, { "epoch": 2.8310249307479225, "grad_norm": 4.839790344238281, "learning_rate": 4.593000000000001e-06, "loss": 0.3456, "step": 1534 }, { "epoch": 2.832871652816251, "grad_norm": 2.4936726093292236, "learning_rate": 4.5960000000000006e-06, 
"loss": 0.7057, "step": 1535 }, { "epoch": 2.83471837488458, "grad_norm": 3.3598785400390625, "learning_rate": 4.5989999999999995e-06, "loss": 0.5805, "step": 1536 }, { "epoch": 2.8365650969529086, "grad_norm": 1.8686054944992065, "learning_rate": 4.602e-06, "loss": 0.5297, "step": 1537 }, { "epoch": 2.8384118190212373, "grad_norm": 1.5554466247558594, "learning_rate": 4.605e-06, "loss": 0.4223, "step": 1538 }, { "epoch": 2.840258541089566, "grad_norm": 2.1304216384887695, "learning_rate": 4.608e-06, "loss": 0.4202, "step": 1539 }, { "epoch": 2.8421052631578947, "grad_norm": 2.460719108581543, "learning_rate": 4.611e-06, "loss": 0.4698, "step": 1540 }, { "epoch": 2.8439519852262234, "grad_norm": 1.9906436204910278, "learning_rate": 4.614e-06, "loss": 0.4742, "step": 1541 }, { "epoch": 2.845798707294552, "grad_norm": 2.511833906173706, "learning_rate": 4.617e-06, "loss": 0.3593, "step": 1542 }, { "epoch": 2.847645429362881, "grad_norm": 2.1805901527404785, "learning_rate": 4.62e-06, "loss": 0.3863, "step": 1543 }, { "epoch": 2.8494921514312095, "grad_norm": 1.7413595914840698, "learning_rate": 4.623e-06, "loss": 0.4269, "step": 1544 }, { "epoch": 2.851338873499538, "grad_norm": 2.286431312561035, "learning_rate": 4.626e-06, "loss": 0.3202, "step": 1545 }, { "epoch": 2.853185595567867, "grad_norm": 2.3051440715789795, "learning_rate": 4.629e-06, "loss": 0.3118, "step": 1546 }, { "epoch": 2.855032317636196, "grad_norm": 2.242866277694702, "learning_rate": 4.632000000000001e-06, "loss": 0.355, "step": 1547 }, { "epoch": 2.8568790397045243, "grad_norm": 1.6628878116607666, "learning_rate": 4.635e-06, "loss": 0.2136, "step": 1548 }, { "epoch": 2.8587257617728534, "grad_norm": 1.5874680280685425, "learning_rate": 4.6379999999999995e-06, "loss": 0.2046, "step": 1549 }, { "epoch": 2.8605724838411817, "grad_norm": 2.236630439758301, "learning_rate": 4.641e-06, "loss": 0.1926, "step": 1550 }, { "epoch": 2.862419205909511, "grad_norm": 2.906998872756958, "learning_rate": 
4.644e-06, "loss": 0.1719, "step": 1551 }, { "epoch": 2.864265927977839, "grad_norm": 1.578851342201233, "learning_rate": 4.6470000000000006e-06, "loss": 0.2125, "step": 1552 }, { "epoch": 2.866112650046168, "grad_norm": 2.263638496398926, "learning_rate": 4.65e-06, "loss": 0.1364, "step": 1553 }, { "epoch": 2.867959372114497, "grad_norm": 1.3934235572814941, "learning_rate": 4.653e-06, "loss": 0.1701, "step": 1554 }, { "epoch": 2.8698060941828256, "grad_norm": 1.5761624574661255, "learning_rate": 4.656e-06, "loss": 0.1496, "step": 1555 }, { "epoch": 2.8716528162511543, "grad_norm": 2.035221576690674, "learning_rate": 4.659e-06, "loss": 0.15, "step": 1556 }, { "epoch": 2.873499538319483, "grad_norm": 1.6897352933883667, "learning_rate": 4.6620000000000004e-06, "loss": 0.158, "step": 1557 }, { "epoch": 2.8753462603878117, "grad_norm": 2.1436944007873535, "learning_rate": 4.665e-06, "loss": 0.1266, "step": 1558 }, { "epoch": 2.8771929824561404, "grad_norm": 2.5911736488342285, "learning_rate": 4.668e-06, "loss": 0.2247, "step": 1559 }, { "epoch": 2.879039704524469, "grad_norm": 2.169934034347534, "learning_rate": 4.671000000000001e-06, "loss": 0.2369, "step": 1560 }, { "epoch": 2.880886426592798, "grad_norm": 1.4315106868743896, "learning_rate": 4.674e-06, "loss": 0.1544, "step": 1561 }, { "epoch": 2.8827331486611265, "grad_norm": 1.5830488204956055, "learning_rate": 4.677e-06, "loss": 0.1559, "step": 1562 }, { "epoch": 2.884579870729455, "grad_norm": 2.221632480621338, "learning_rate": 4.68e-06, "loss": 0.1657, "step": 1563 }, { "epoch": 2.886426592797784, "grad_norm": 2.146801710128784, "learning_rate": 4.683e-06, "loss": 0.1606, "step": 1564 }, { "epoch": 2.8882733148661126, "grad_norm": 2.3033361434936523, "learning_rate": 4.6860000000000005e-06, "loss": 0.2723, "step": 1565 }, { "epoch": 2.8901200369344413, "grad_norm": 2.868062734603882, "learning_rate": 4.689e-06, "loss": 0.1503, "step": 1566 }, { "epoch": 2.89196675900277, "grad_norm": 1.7747246026992798, 
"learning_rate": 4.692e-06, "loss": 0.1728, "step": 1567 }, { "epoch": 2.8938134810710987, "grad_norm": 1.6821857690811157, "learning_rate": 4.695e-06, "loss": 0.141, "step": 1568 }, { "epoch": 2.8956602031394274, "grad_norm": 2.7381348609924316, "learning_rate": 4.698e-06, "loss": 0.2091, "step": 1569 }, { "epoch": 2.897506925207756, "grad_norm": 2.3321731090545654, "learning_rate": 4.701e-06, "loss": 0.2118, "step": 1570 }, { "epoch": 2.899353647276085, "grad_norm": 1.6755270957946777, "learning_rate": 4.704e-06, "loss": 0.1553, "step": 1571 }, { "epoch": 2.901200369344414, "grad_norm": 1.7802635431289673, "learning_rate": 4.707000000000001e-06, "loss": 0.1424, "step": 1572 }, { "epoch": 2.903047091412742, "grad_norm": 2.5739917755126953, "learning_rate": 4.71e-06, "loss": 0.1851, "step": 1573 }, { "epoch": 2.9048938134810713, "grad_norm": 2.763282060623169, "learning_rate": 4.713e-06, "loss": 0.2264, "step": 1574 }, { "epoch": 2.9067405355493996, "grad_norm": 1.8808858394622803, "learning_rate": 4.716e-06, "loss": 0.1417, "step": 1575 }, { "epoch": 2.9085872576177287, "grad_norm": 3.0323848724365234, "learning_rate": 4.719e-06, "loss": 0.1822, "step": 1576 }, { "epoch": 2.910433979686057, "grad_norm": 2.023909091949463, "learning_rate": 4.722000000000001e-06, "loss": 0.1641, "step": 1577 }, { "epoch": 2.912280701754386, "grad_norm": 1.6922128200531006, "learning_rate": 4.7250000000000005e-06, "loss": 0.1566, "step": 1578 }, { "epoch": 2.914127423822715, "grad_norm": 2.9664113521575928, "learning_rate": 4.7279999999999995e-06, "loss": 0.1718, "step": 1579 }, { "epoch": 2.9159741458910435, "grad_norm": 2.469337224960327, "learning_rate": 4.731e-06, "loss": 0.2049, "step": 1580 }, { "epoch": 2.9178208679593722, "grad_norm": 1.755744218826294, "learning_rate": 4.734e-06, "loss": 0.1615, "step": 1581 }, { "epoch": 2.919667590027701, "grad_norm": 4.633786678314209, "learning_rate": 4.7370000000000006e-06, "loss": 0.1993, "step": 1582 }, { "epoch": 2.9215143120960296, 
"grad_norm": 1.6162577867507935, "learning_rate": 4.74e-06, "loss": 0.1641, "step": 1583 }, { "epoch": 2.9233610341643583, "grad_norm": 3.0486338138580322, "learning_rate": 4.743e-06, "loss": 0.2215, "step": 1584 }, { "epoch": 2.925207756232687, "grad_norm": 4.850353717803955, "learning_rate": 4.746e-06, "loss": 0.6632, "step": 1585 }, { "epoch": 2.9270544783010157, "grad_norm": 2.6363439559936523, "learning_rate": 4.749e-06, "loss": 0.6831, "step": 1586 }, { "epoch": 2.9289012003693444, "grad_norm": 2.2633118629455566, "learning_rate": 4.752e-06, "loss": 0.5065, "step": 1587 }, { "epoch": 2.930747922437673, "grad_norm": 2.4349043369293213, "learning_rate": 4.755e-06, "loss": 0.5256, "step": 1588 }, { "epoch": 2.932594644506002, "grad_norm": 14.977548599243164, "learning_rate": 4.758e-06, "loss": 0.6051, "step": 1589 }, { "epoch": 2.9344413665743305, "grad_norm": 1.744310975074768, "learning_rate": 4.761000000000001e-06, "loss": 0.3641, "step": 1590 }, { "epoch": 2.936288088642659, "grad_norm": 2.5987777709960938, "learning_rate": 4.764e-06, "loss": 0.469, "step": 1591 }, { "epoch": 2.938134810710988, "grad_norm": 3.108370304107666, "learning_rate": 4.767e-06, "loss": 0.2756, "step": 1592 }, { "epoch": 2.9399815327793166, "grad_norm": 1.5362529754638672, "learning_rate": 4.77e-06, "loss": 0.3, "step": 1593 }, { "epoch": 2.9418282548476453, "grad_norm": 1.8580305576324463, "learning_rate": 4.773e-06, "loss": 0.2937, "step": 1594 }, { "epoch": 2.943674976915974, "grad_norm": 2.898499011993408, "learning_rate": 4.7760000000000005e-06, "loss": 0.2125, "step": 1595 }, { "epoch": 2.9455216989843027, "grad_norm": 2.2666280269622803, "learning_rate": 4.779e-06, "loss": 0.3014, "step": 1596 }, { "epoch": 2.9473684210526314, "grad_norm": 1.9050452709197998, "learning_rate": 4.782e-06, "loss": 0.2166, "step": 1597 }, { "epoch": 2.94921514312096, "grad_norm": 2.1083133220672607, "learning_rate": 4.785e-06, "loss": 0.1901, "step": 1598 }, { "epoch": 2.9510618651892893, 
"grad_norm": 2.402987480163574, "learning_rate": 4.788e-06, "loss": 0.2178, "step": 1599 }, { "epoch": 2.9529085872576175, "grad_norm": 1.4459607601165771, "learning_rate": 4.791e-06, "loss": 0.2102, "step": 1600 }, { "epoch": 2.9547553093259467, "grad_norm": 1.7319731712341309, "learning_rate": 4.794e-06, "loss": 0.2142, "step": 1601 }, { "epoch": 2.956602031394275, "grad_norm": 1.5321577787399292, "learning_rate": 4.797e-06, "loss": 0.1676, "step": 1602 }, { "epoch": 2.958448753462604, "grad_norm": 1.6956512928009033, "learning_rate": 4.800000000000001e-06, "loss": 0.1819, "step": 1603 }, { "epoch": 2.9602954755309328, "grad_norm": 2.4199330806732178, "learning_rate": 4.803e-06, "loss": 0.1777, "step": 1604 }, { "epoch": 2.9621421975992615, "grad_norm": 1.6201728582382202, "learning_rate": 4.806e-06, "loss": 0.155, "step": 1605 }, { "epoch": 2.96398891966759, "grad_norm": 2.5003530979156494, "learning_rate": 4.809e-06, "loss": 0.1551, "step": 1606 }, { "epoch": 2.965835641735919, "grad_norm": 2.197706699371338, "learning_rate": 4.812e-06, "loss": 0.1395, "step": 1607 }, { "epoch": 2.9676823638042475, "grad_norm": 1.5907740592956543, "learning_rate": 4.8150000000000005e-06, "loss": 0.1677, "step": 1608 }, { "epoch": 2.9695290858725762, "grad_norm": 2.2628791332244873, "learning_rate": 4.818e-06, "loss": 0.1671, "step": 1609 }, { "epoch": 2.971375807940905, "grad_norm": 2.739673376083374, "learning_rate": 4.821e-06, "loss": 0.1803, "step": 1610 }, { "epoch": 2.9732225300092336, "grad_norm": 1.834478497505188, "learning_rate": 4.824e-06, "loss": 0.1546, "step": 1611 }, { "epoch": 2.9750692520775623, "grad_norm": 1.9884629249572754, "learning_rate": 4.827e-06, "loss": 0.1619, "step": 1612 }, { "epoch": 2.976915974145891, "grad_norm": 1.7522231340408325, "learning_rate": 4.83e-06, "loss": 0.1375, "step": 1613 }, { "epoch": 2.9787626962142197, "grad_norm": 1.8641668558120728, "learning_rate": 4.833e-06, "loss": 0.1763, "step": 1614 }, { "epoch": 2.9806094182825484, 
"grad_norm": 2.795372724533081, "learning_rate": 4.836000000000001e-06, "loss": 0.1819, "step": 1615 }, { "epoch": 2.982456140350877, "grad_norm": 1.361962914466858, "learning_rate": 4.839e-06, "loss": 0.1448, "step": 1616 }, { "epoch": 2.984302862419206, "grad_norm": 2.029982805252075, "learning_rate": 4.8419999999999996e-06, "loss": 0.1818, "step": 1617 }, { "epoch": 2.9861495844875345, "grad_norm": 3.986133575439453, "learning_rate": 4.845e-06, "loss": 0.1743, "step": 1618 }, { "epoch": 2.9879963065558632, "grad_norm": 2.4707727432250977, "learning_rate": 4.848e-06, "loss": 0.1853, "step": 1619 }, { "epoch": 2.989843028624192, "grad_norm": 3.1868081092834473, "learning_rate": 4.851000000000001e-06, "loss": 0.1881, "step": 1620 }, { "epoch": 2.9916897506925206, "grad_norm": 2.083648204803467, "learning_rate": 4.8540000000000005e-06, "loss": 0.1776, "step": 1621 }, { "epoch": 2.9935364727608493, "grad_norm": 2.8003695011138916, "learning_rate": 4.856999999999999e-06, "loss": 0.1843, "step": 1622 }, { "epoch": 2.995383194829178, "grad_norm": 2.0337893962860107, "learning_rate": 4.86e-06, "loss": 0.1757, "step": 1623 }, { "epoch": 2.997229916897507, "grad_norm": 2.571591854095459, "learning_rate": 4.863e-06, "loss": 0.1648, "step": 1624 }, { "epoch": 2.9990766389658354, "grad_norm": 2.681323766708374, "learning_rate": 4.8660000000000005e-06, "loss": 0.2077, "step": 1625 }, { "epoch": 3.0, "grad_norm": 1.5731614828109741, "learning_rate": 4.869e-06, "loss": 0.0914, "step": 1626 }, { "epoch": 3.0018467220683287, "grad_norm": 2.272230386734009, "learning_rate": 4.872e-06, "loss": 0.5649, "step": 1627 }, { "epoch": 3.0036934441366574, "grad_norm": 2.1991465091705322, "learning_rate": 4.875e-06, "loss": 0.624, "step": 1628 }, { "epoch": 3.005540166204986, "grad_norm": 1.6478530168533325, "learning_rate": 4.878e-06, "loss": 0.563, "step": 1629 }, { "epoch": 3.007386888273315, "grad_norm": 1.7868117094039917, "learning_rate": 4.881e-06, "loss": 0.4616, "step": 1630 }, { 
"epoch": 3.0092336103416435, "grad_norm": 1.7761942148208618, "learning_rate": 4.884e-06, "loss": 0.4051, "step": 1631 }, { "epoch": 3.011080332409972, "grad_norm": 1.7362419366836548, "learning_rate": 4.887e-06, "loss": 0.4763, "step": 1632 }, { "epoch": 3.012927054478301, "grad_norm": 2.110557794570923, "learning_rate": 4.890000000000001e-06, "loss": 0.4974, "step": 1633 }, { "epoch": 3.0147737765466296, "grad_norm": 1.771098017692566, "learning_rate": 4.8929999999999996e-06, "loss": 0.3519, "step": 1634 }, { "epoch": 3.0166204986149583, "grad_norm": 1.9019705057144165, "learning_rate": 4.896e-06, "loss": 0.3971, "step": 1635 }, { "epoch": 3.018467220683287, "grad_norm": 1.8817514181137085, "learning_rate": 4.899e-06, "loss": 0.3578, "step": 1636 }, { "epoch": 3.0203139427516157, "grad_norm": 1.5816795825958252, "learning_rate": 4.902e-06, "loss": 0.3423, "step": 1637 }, { "epoch": 3.0221606648199444, "grad_norm": 2.711153030395508, "learning_rate": 4.9050000000000005e-06, "loss": 0.2532, "step": 1638 }, { "epoch": 3.0240073868882735, "grad_norm": 1.6076635122299194, "learning_rate": 4.908e-06, "loss": 0.2882, "step": 1639 }, { "epoch": 3.0258541089566022, "grad_norm": 1.8714416027069092, "learning_rate": 4.911e-06, "loss": 0.2771, "step": 1640 }, { "epoch": 3.027700831024931, "grad_norm": 2.4448885917663574, "learning_rate": 4.914e-06, "loss": 0.2915, "step": 1641 }, { "epoch": 3.0295475530932596, "grad_norm": 1.5730973482131958, "learning_rate": 4.917e-06, "loss": 0.1761, "step": 1642 }, { "epoch": 3.0313942751615883, "grad_norm": 1.8119219541549683, "learning_rate": 4.92e-06, "loss": 0.172, "step": 1643 }, { "epoch": 3.033240997229917, "grad_norm": 1.9520834684371948, "learning_rate": 4.923e-06, "loss": 0.2029, "step": 1644 }, { "epoch": 3.0350877192982457, "grad_norm": 1.4923053979873657, "learning_rate": 4.926000000000001e-06, "loss": 0.1399, "step": 1645 }, { "epoch": 3.0369344413665744, "grad_norm": 1.0933345556259155, "learning_rate": 
4.929000000000001e-06, "loss": 0.1687, "step": 1646 }, { "epoch": 3.038781163434903, "grad_norm": 2.112572193145752, "learning_rate": 4.9319999999999995e-06, "loss": 0.2484, "step": 1647 }, { "epoch": 3.040627885503232, "grad_norm": 1.7236456871032715, "learning_rate": 4.935e-06, "loss": 0.1377, "step": 1648 }, { "epoch": 3.0424746075715605, "grad_norm": 1.726274847984314, "learning_rate": 4.938e-06, "loss": 0.1647, "step": 1649 }, { "epoch": 3.044321329639889, "grad_norm": 1.6629983186721802, "learning_rate": 4.941000000000001e-06, "loss": 0.1538, "step": 1650 }, { "epoch": 3.046168051708218, "grad_norm": 1.8645832538604736, "learning_rate": 4.9440000000000004e-06, "loss": 0.1484, "step": 1651 }, { "epoch": 3.0480147737765466, "grad_norm": 1.8150570392608643, "learning_rate": 4.947e-06, "loss": 0.1349, "step": 1652 }, { "epoch": 3.0498614958448753, "grad_norm": 2.544318199157715, "learning_rate": 4.95e-06, "loss": 0.1593, "step": 1653 }, { "epoch": 3.051708217913204, "grad_norm": 2.0313374996185303, "learning_rate": 4.953e-06, "loss": 0.1239, "step": 1654 }, { "epoch": 3.0535549399815327, "grad_norm": 1.766607642173767, "learning_rate": 4.9560000000000005e-06, "loss": 0.1705, "step": 1655 }, { "epoch": 3.0554016620498614, "grad_norm": 1.6488720178604126, "learning_rate": 4.959e-06, "loss": 0.1477, "step": 1656 }, { "epoch": 3.05724838411819, "grad_norm": 1.9187687635421753, "learning_rate": 4.962e-06, "loss": 0.1626, "step": 1657 }, { "epoch": 3.059095106186519, "grad_norm": 1.9352442026138306, "learning_rate": 4.965000000000001e-06, "loss": 0.1291, "step": 1658 }, { "epoch": 3.0609418282548475, "grad_norm": 2.122387647628784, "learning_rate": 4.968e-06, "loss": 0.2403, "step": 1659 }, { "epoch": 3.062788550323176, "grad_norm": 1.7077113389968872, "learning_rate": 4.9709999999999995e-06, "loss": 0.1439, "step": 1660 }, { "epoch": 3.064635272391505, "grad_norm": 2.4755544662475586, "learning_rate": 4.974e-06, "loss": 0.1117, "step": 1661 }, { "epoch": 
3.0664819944598336, "grad_norm": 1.930274248123169, "learning_rate": 4.977e-06, "loss": 0.1821, "step": 1662 }, { "epoch": 3.0683287165281623, "grad_norm": 1.8199743032455444, "learning_rate": 4.980000000000001e-06, "loss": 0.1167, "step": 1663 }, { "epoch": 3.0701754385964914, "grad_norm": 1.5116584300994873, "learning_rate": 4.983e-06, "loss": 0.1242, "step": 1664 }, { "epoch": 3.07202216066482, "grad_norm": 1.9836688041687012, "learning_rate": 4.985999999999999e-06, "loss": 0.186, "step": 1665 }, { "epoch": 3.073868882733149, "grad_norm": 2.199866533279419, "learning_rate": 4.989e-06, "loss": 0.1463, "step": 1666 }, { "epoch": 3.0757156048014775, "grad_norm": 1.57767653465271, "learning_rate": 4.992e-06, "loss": 0.1495, "step": 1667 }, { "epoch": 3.0775623268698062, "grad_norm": 9.19756031036377, "learning_rate": 4.9950000000000005e-06, "loss": 0.1426, "step": 1668 }, { "epoch": 3.079409048938135, "grad_norm": 2.502916097640991, "learning_rate": 4.998e-06, "loss": 0.1973, "step": 1669 }, { "epoch": 3.0812557710064636, "grad_norm": 2.128638982772827, "learning_rate": 5.001e-06, "loss": 0.1319, "step": 1670 }, { "epoch": 3.0831024930747923, "grad_norm": 1.7738232612609863, "learning_rate": 5.004e-06, "loss": 0.155, "step": 1671 }, { "epoch": 3.084949215143121, "grad_norm": 2.350332260131836, "learning_rate": 5.007e-06, "loss": 0.1543, "step": 1672 }, { "epoch": 3.0867959372114497, "grad_norm": 1.9206435680389404, "learning_rate": 5.01e-06, "loss": 0.1919, "step": 1673 }, { "epoch": 3.0886426592797784, "grad_norm": 2.0797274112701416, "learning_rate": 5.013e-06, "loss": 0.1699, "step": 1674 }, { "epoch": 3.090489381348107, "grad_norm": 1.9949361085891724, "learning_rate": 5.016e-06, "loss": 0.178, "step": 1675 }, { "epoch": 3.092336103416436, "grad_norm": 2.23211669921875, "learning_rate": 5.0190000000000006e-06, "loss": 0.1716, "step": 1676 }, { "epoch": 3.0941828254847645, "grad_norm": 4.083524703979492, "learning_rate": 5.0219999999999995e-06, "loss": 0.6273, 
"step": 1677 }, { "epoch": 3.0960295475530932, "grad_norm": 1.6188116073608398, "learning_rate": 5.025e-06, "loss": 0.5839, "step": 1678 }, { "epoch": 3.097876269621422, "grad_norm": 2.0627925395965576, "learning_rate": 5.028e-06, "loss": 0.5534, "step": 1679 }, { "epoch": 3.0997229916897506, "grad_norm": 2.4019577503204346, "learning_rate": 5.031e-06, "loss": 0.5056, "step": 1680 }, { "epoch": 3.1015697137580793, "grad_norm": 2.0087342262268066, "learning_rate": 5.034e-06, "loss": 0.4375, "step": 1681 }, { "epoch": 3.103416435826408, "grad_norm": 1.7077587842941284, "learning_rate": 5.037e-06, "loss": 0.4366, "step": 1682 }, { "epoch": 3.1052631578947367, "grad_norm": 1.9419629573822021, "learning_rate": 5.04e-06, "loss": 0.3506, "step": 1683 }, { "epoch": 3.1071098799630654, "grad_norm": 1.676218867301941, "learning_rate": 5.043e-06, "loss": 0.3371, "step": 1684 }, { "epoch": 3.108956602031394, "grad_norm": 1.9952783584594727, "learning_rate": 5.046e-06, "loss": 0.3364, "step": 1685 }, { "epoch": 3.110803324099723, "grad_norm": 2.8741376399993896, "learning_rate": 5.049e-06, "loss": 0.3141, "step": 1686 }, { "epoch": 3.1126500461680515, "grad_norm": 1.4315117597579956, "learning_rate": 5.052e-06, "loss": 0.2986, "step": 1687 }, { "epoch": 3.11449676823638, "grad_norm": 1.3568508625030518, "learning_rate": 5.055000000000001e-06, "loss": 0.2689, "step": 1688 }, { "epoch": 3.1163434903047094, "grad_norm": 2.1754391193389893, "learning_rate": 5.0580000000000005e-06, "loss": 0.2782, "step": 1689 }, { "epoch": 3.118190212373038, "grad_norm": 2.0119006633758545, "learning_rate": 5.0609999999999995e-06, "loss": 0.2859, "step": 1690 }, { "epoch": 3.1200369344413668, "grad_norm": 1.4242953062057495, "learning_rate": 5.064e-06, "loss": 0.2626, "step": 1691 }, { "epoch": 3.1218836565096955, "grad_norm": 1.5842236280441284, "learning_rate": 5.067e-06, "loss": 0.2223, "step": 1692 }, { "epoch": 3.123730378578024, "grad_norm": 4.055980205535889, "learning_rate": 
5.070000000000001e-06, "loss": 0.1274, "step": 1693 }, { "epoch": 3.125577100646353, "grad_norm": 2.017376184463501, "learning_rate": 5.073e-06, "loss": 0.2235, "step": 1694 }, { "epoch": 3.1274238227146816, "grad_norm": 1.4071455001831055, "learning_rate": 5.076e-06, "loss": 0.1488, "step": 1695 }, { "epoch": 3.1292705447830103, "grad_norm": 1.4524205923080444, "learning_rate": 5.079e-06, "loss": 0.1208, "step": 1696 }, { "epoch": 3.131117266851339, "grad_norm": 2.6339752674102783, "learning_rate": 5.082e-06, "loss": 0.1519, "step": 1697 }, { "epoch": 3.1329639889196677, "grad_norm": 1.3864768743515015, "learning_rate": 5.0850000000000004e-06, "loss": 0.1285, "step": 1698 }, { "epoch": 3.1348107109879964, "grad_norm": 1.4206315279006958, "learning_rate": 5.088e-06, "loss": 0.1302, "step": 1699 }, { "epoch": 3.136657433056325, "grad_norm": 1.1164093017578125, "learning_rate": 5.091e-06, "loss": 0.1233, "step": 1700 }, { "epoch": 3.1385041551246537, "grad_norm": 2.3856983184814453, "learning_rate": 5.094000000000001e-06, "loss": 0.1948, "step": 1701 }, { "epoch": 3.1403508771929824, "grad_norm": 1.6560757160186768, "learning_rate": 5.097e-06, "loss": 0.1125, "step": 1702 }, { "epoch": 3.142197599261311, "grad_norm": 1.2046996355056763, "learning_rate": 5.1e-06, "loss": 0.1099, "step": 1703 }, { "epoch": 3.14404432132964, "grad_norm": 2.4310736656188965, "learning_rate": 5.103e-06, "loss": 0.1507, "step": 1704 }, { "epoch": 3.1458910433979685, "grad_norm": 1.898454189300537, "learning_rate": 5.106e-06, "loss": 0.1306, "step": 1705 }, { "epoch": 3.1477377654662972, "grad_norm": 2.1356546878814697, "learning_rate": 5.1090000000000006e-06, "loss": 0.1367, "step": 1706 }, { "epoch": 3.149584487534626, "grad_norm": 2.0182154178619385, "learning_rate": 5.112e-06, "loss": 0.1339, "step": 1707 }, { "epoch": 3.1514312096029546, "grad_norm": 1.420379638671875, "learning_rate": 5.115e-06, "loss": 0.1415, "step": 1708 }, { "epoch": 3.1532779316712833, "grad_norm": 
9.14201831817627, "learning_rate": 5.118e-06, "loss": 0.144, "step": 1709 }, { "epoch": 3.155124653739612, "grad_norm": 2.5935096740722656, "learning_rate": 5.121e-06, "loss": 0.1447, "step": 1710 }, { "epoch": 3.1569713758079407, "grad_norm": 2.102509021759033, "learning_rate": 5.124e-06, "loss": 0.1698, "step": 1711 }, { "epoch": 3.1588180978762694, "grad_norm": 1.7125483751296997, "learning_rate": 5.127e-06, "loss": 0.1426, "step": 1712 }, { "epoch": 3.160664819944598, "grad_norm": 2.138498306274414, "learning_rate": 5.130000000000001e-06, "loss": 0.1363, "step": 1713 }, { "epoch": 3.1625115420129273, "grad_norm": 2.0215048789978027, "learning_rate": 5.133e-06, "loss": 0.1309, "step": 1714 }, { "epoch": 3.164358264081256, "grad_norm": 3.2613959312438965, "learning_rate": 5.136e-06, "loss": 0.1733, "step": 1715 }, { "epoch": 3.1662049861495847, "grad_norm": 2.8282909393310547, "learning_rate": 5.139e-06, "loss": 0.1563, "step": 1716 }, { "epoch": 3.1680517082179134, "grad_norm": 1.946689486503601, "learning_rate": 5.142e-06, "loss": 0.1375, "step": 1717 }, { "epoch": 3.169898430286242, "grad_norm": 1.8160659074783325, "learning_rate": 5.145000000000001e-06, "loss": 0.1319, "step": 1718 }, { "epoch": 3.1717451523545708, "grad_norm": 1.7862443923950195, "learning_rate": 5.1480000000000005e-06, "loss": 0.1782, "step": 1719 }, { "epoch": 3.1735918744228995, "grad_norm": 2.174208879470825, "learning_rate": 5.1509999999999995e-06, "loss": 0.1747, "step": 1720 }, { "epoch": 3.175438596491228, "grad_norm": 2.127056121826172, "learning_rate": 5.154e-06, "loss": 0.1822, "step": 1721 }, { "epoch": 3.177285318559557, "grad_norm": 1.744895577430725, "learning_rate": 5.157e-06, "loss": 0.1204, "step": 1722 }, { "epoch": 3.1791320406278856, "grad_norm": 1.6898212432861328, "learning_rate": 5.16e-06, "loss": 0.1292, "step": 1723 }, { "epoch": 3.1809787626962143, "grad_norm": 1.7619431018829346, "learning_rate": 5.163e-06, "loss": 0.1442, "step": 1724 }, { "epoch": 
3.182825484764543, "grad_norm": 1.6283921003341675, "learning_rate": 5.166e-06, "loss": 0.1423, "step": 1725 }, { "epoch": 3.1846722068328717, "grad_norm": 3.300206184387207, "learning_rate": 5.169e-06, "loss": 0.1778, "step": 1726 }, { "epoch": 3.1865189289012004, "grad_norm": 1.6233912706375122, "learning_rate": 5.172e-06, "loss": 0.534, "step": 1727 }, { "epoch": 3.188365650969529, "grad_norm": 2.2645161151885986, "learning_rate": 5.175e-06, "loss": 0.589, "step": 1728 }, { "epoch": 3.1902123730378578, "grad_norm": 4.905712127685547, "learning_rate": 5.178e-06, "loss": 0.4501, "step": 1729 }, { "epoch": 3.1920590951061865, "grad_norm": 2.5921883583068848, "learning_rate": 5.181e-06, "loss": 0.4044, "step": 1730 }, { "epoch": 3.193905817174515, "grad_norm": 1.6506649255752563, "learning_rate": 5.184000000000001e-06, "loss": 0.347, "step": 1731 }, { "epoch": 3.195752539242844, "grad_norm": 1.9817479848861694, "learning_rate": 5.1870000000000005e-06, "loss": 0.4327, "step": 1732 }, { "epoch": 3.1975992613111726, "grad_norm": 2.370718240737915, "learning_rate": 5.1899999999999994e-06, "loss": 0.3587, "step": 1733 }, { "epoch": 3.1994459833795013, "grad_norm": 1.4260053634643555, "learning_rate": 5.193e-06, "loss": 0.2848, "step": 1734 }, { "epoch": 3.20129270544783, "grad_norm": 1.3881109952926636, "learning_rate": 5.196e-06, "loss": 0.2891, "step": 1735 }, { "epoch": 3.2031394275161587, "grad_norm": 1.8376153707504272, "learning_rate": 5.1990000000000005e-06, "loss": 0.3453, "step": 1736 }, { "epoch": 3.2049861495844874, "grad_norm": 2.22748064994812, "learning_rate": 5.202e-06, "loss": 0.4393, "step": 1737 }, { "epoch": 3.206832871652816, "grad_norm": 1.5622321367263794, "learning_rate": 5.205e-06, "loss": 0.2238, "step": 1738 }, { "epoch": 3.208679593721145, "grad_norm": 2.117229461669922, "learning_rate": 5.208e-06, "loss": 0.2132, "step": 1739 }, { "epoch": 3.2105263157894735, "grad_norm": 5.6894073486328125, "learning_rate": 5.211e-06, "loss": 0.1495, "step": 
1740 }, { "epoch": 3.2123730378578026, "grad_norm": 1.2661709785461426, "learning_rate": 5.214e-06, "loss": 0.1761, "step": 1741 }, { "epoch": 3.2142197599261313, "grad_norm": 1.3423945903778076, "learning_rate": 5.217e-06, "loss": 0.1238, "step": 1742 }, { "epoch": 3.21606648199446, "grad_norm": 1.421120047569275, "learning_rate": 5.22e-06, "loss": 0.1806, "step": 1743 }, { "epoch": 3.2179132040627887, "grad_norm": 1.6990739107131958, "learning_rate": 5.223000000000001e-06, "loss": 0.1248, "step": 1744 }, { "epoch": 3.2197599261311174, "grad_norm": 2.5295536518096924, "learning_rate": 5.226e-06, "loss": 0.1372, "step": 1745 }, { "epoch": 3.221606648199446, "grad_norm": 1.3577442169189453, "learning_rate": 5.229e-06, "loss": 0.145, "step": 1746 }, { "epoch": 3.223453370267775, "grad_norm": 1.1989928483963013, "learning_rate": 5.232e-06, "loss": 0.0898, "step": 1747 }, { "epoch": 3.2253000923361035, "grad_norm": 2.8232219219207764, "learning_rate": 5.235e-06, "loss": 0.164, "step": 1748 }, { "epoch": 3.227146814404432, "grad_norm": 2.043588876724243, "learning_rate": 5.2380000000000005e-06, "loss": 0.1451, "step": 1749 }, { "epoch": 3.228993536472761, "grad_norm": 2.160836696624756, "learning_rate": 5.241e-06, "loss": 0.1178, "step": 1750 }, { "epoch": 3.2308402585410896, "grad_norm": 1.4974416494369507, "learning_rate": 5.244e-06, "loss": 0.1553, "step": 1751 }, { "epoch": 3.2326869806094183, "grad_norm": 2.6034817695617676, "learning_rate": 5.247e-06, "loss": 0.172, "step": 1752 }, { "epoch": 3.234533702677747, "grad_norm": 1.3825095891952515, "learning_rate": 5.25e-06, "loss": 0.138, "step": 1753 }, { "epoch": 3.2363804247460757, "grad_norm": 1.5406382083892822, "learning_rate": 5.253e-06, "loss": 0.1471, "step": 1754 }, { "epoch": 3.2382271468144044, "grad_norm": 2.92680287361145, "learning_rate": 5.256e-06, "loss": 0.1445, "step": 1755 }, { "epoch": 3.240073868882733, "grad_norm": 4.907720565795898, "learning_rate": 5.259000000000001e-06, "loss": 0.1616, 
"step": 1756 }, { "epoch": 3.2419205909510618, "grad_norm": 1.7645773887634277, "learning_rate": 5.262e-06, "loss": 0.1392, "step": 1757 }, { "epoch": 3.2437673130193905, "grad_norm": 2.366370677947998, "learning_rate": 5.2649999999999996e-06, "loss": 0.1177, "step": 1758 }, { "epoch": 3.245614035087719, "grad_norm": 3.015122890472412, "learning_rate": 5.268e-06, "loss": 0.1436, "step": 1759 }, { "epoch": 3.247460757156048, "grad_norm": 1.498371958732605, "learning_rate": 5.271e-06, "loss": 0.1338, "step": 1760 }, { "epoch": 3.2493074792243766, "grad_norm": 1.8410630226135254, "learning_rate": 5.274000000000001e-06, "loss": 0.1333, "step": 1761 }, { "epoch": 3.2511542012927053, "grad_norm": 1.737515926361084, "learning_rate": 5.2770000000000005e-06, "loss": 0.1199, "step": 1762 }, { "epoch": 3.253000923361034, "grad_norm": 1.9851926565170288, "learning_rate": 5.279999999999999e-06, "loss": 0.1505, "step": 1763 }, { "epoch": 3.254847645429363, "grad_norm": 5.256008148193359, "learning_rate": 5.283e-06, "loss": 0.1445, "step": 1764 }, { "epoch": 3.2566943674976914, "grad_norm": 1.6933232545852661, "learning_rate": 5.286e-06, "loss": 0.1317, "step": 1765 }, { "epoch": 3.2585410895660205, "grad_norm": 2.7634260654449463, "learning_rate": 5.2890000000000005e-06, "loss": 0.1561, "step": 1766 }, { "epoch": 3.260387811634349, "grad_norm": 2.7004668712615967, "learning_rate": 5.292e-06, "loss": 0.1626, "step": 1767 }, { "epoch": 3.262234533702678, "grad_norm": 1.7570018768310547, "learning_rate": 5.295e-06, "loss": 0.1528, "step": 1768 }, { "epoch": 3.2640812557710066, "grad_norm": 2.9642531871795654, "learning_rate": 5.298e-06, "loss": 0.11, "step": 1769 }, { "epoch": 3.2659279778393353, "grad_norm": 2.9844934940338135, "learning_rate": 5.301e-06, "loss": 0.1467, "step": 1770 }, { "epoch": 3.267774699907664, "grad_norm": 1.9152250289916992, "learning_rate": 5.304e-06, "loss": 0.1505, "step": 1771 }, { "epoch": 3.2696214219759927, "grad_norm": 2.3223955631256104, 
"learning_rate": 5.307e-06, "loss": 0.1601, "step": 1772 }, { "epoch": 3.2714681440443214, "grad_norm": 1.631397008895874, "learning_rate": 5.31e-06, "loss": 0.1601, "step": 1773 }, { "epoch": 3.27331486611265, "grad_norm": 2.114636182785034, "learning_rate": 5.313000000000001e-06, "loss": 0.1995, "step": 1774 }, { "epoch": 3.275161588180979, "grad_norm": 1.5473500490188599, "learning_rate": 5.3160000000000004e-06, "loss": 0.145, "step": 1775 }, { "epoch": 3.2770083102493075, "grad_norm": 2.362990379333496, "learning_rate": 5.319e-06, "loss": 0.2343, "step": 1776 }, { "epoch": 3.278855032317636, "grad_norm": 3.1484501361846924, "learning_rate": 5.322e-06, "loss": 0.5929, "step": 1777 }, { "epoch": 3.280701754385965, "grad_norm": 5.884219646453857, "learning_rate": 5.325e-06, "loss": 0.666, "step": 1778 }, { "epoch": 3.2825484764542936, "grad_norm": 1.5649977922439575, "learning_rate": 5.3280000000000005e-06, "loss": 0.4397, "step": 1779 }, { "epoch": 3.2843951985226223, "grad_norm": 2.324662446975708, "learning_rate": 5.331e-06, "loss": 0.4573, "step": 1780 }, { "epoch": 3.286241920590951, "grad_norm": 1.8567326068878174, "learning_rate": 5.334000000000001e-06, "loss": 0.3962, "step": 1781 }, { "epoch": 3.2880886426592797, "grad_norm": 2.1249706745147705, "learning_rate": 5.337e-06, "loss": 0.3903, "step": 1782 }, { "epoch": 3.2899353647276084, "grad_norm": 2.6666476726531982, "learning_rate": 5.34e-06, "loss": 0.3403, "step": 1783 }, { "epoch": 3.291782086795937, "grad_norm": 1.413912296295166, "learning_rate": 5.343e-06, "loss": 0.3315, "step": 1784 }, { "epoch": 3.293628808864266, "grad_norm": 2.263784885406494, "learning_rate": 5.346e-06, "loss": 0.3878, "step": 1785 }, { "epoch": 3.2954755309325945, "grad_norm": 1.5043153762817383, "learning_rate": 5.349e-06, "loss": 0.3375, "step": 1786 }, { "epoch": 3.297322253000923, "grad_norm": 1.6676349639892578, "learning_rate": 5.352000000000001e-06, "loss": 0.2909, "step": 1787 }, { "epoch": 3.299168975069252, 
"grad_norm": 2.5972864627838135, "learning_rate": 5.3549999999999996e-06, "loss": 0.3257, "step": 1788 }, { "epoch": 3.301015697137581, "grad_norm": 1.6890889406204224, "learning_rate": 5.358e-06, "loss": 0.2643, "step": 1789 }, { "epoch": 3.3028624192059093, "grad_norm": 1.1364909410476685, "learning_rate": 5.361e-06, "loss": 0.2016, "step": 1790 }, { "epoch": 3.3047091412742384, "grad_norm": 1.5025843381881714, "learning_rate": 5.364e-06, "loss": 0.2205, "step": 1791 }, { "epoch": 3.306555863342567, "grad_norm": 1.2549930810928345, "learning_rate": 5.3670000000000005e-06, "loss": 0.2018, "step": 1792 }, { "epoch": 3.308402585410896, "grad_norm": 2.2258710861206055, "learning_rate": 5.37e-06, "loss": 0.1963, "step": 1793 }, { "epoch": 3.3102493074792245, "grad_norm": 1.2186377048492432, "learning_rate": 5.373e-06, "loss": 0.1456, "step": 1794 }, { "epoch": 3.312096029547553, "grad_norm": 1.2291967868804932, "learning_rate": 5.376e-06, "loss": 0.1224, "step": 1795 }, { "epoch": 3.313942751615882, "grad_norm": 0.9632380604743958, "learning_rate": 5.379e-06, "loss": 0.0991, "step": 1796 }, { "epoch": 3.3157894736842106, "grad_norm": 1.194434404373169, "learning_rate": 5.382e-06, "loss": 0.095, "step": 1797 }, { "epoch": 3.3176361957525393, "grad_norm": 1.77876877784729, "learning_rate": 5.385e-06, "loss": 0.1738, "step": 1798 }, { "epoch": 3.319482917820868, "grad_norm": 2.20609974861145, "learning_rate": 5.388000000000001e-06, "loss": 0.105, "step": 1799 }, { "epoch": 3.3213296398891967, "grad_norm": 1.5217195749282837, "learning_rate": 5.391e-06, "loss": 0.1241, "step": 1800 }, { "epoch": 3.3231763619575254, "grad_norm": 1.194684386253357, "learning_rate": 5.3939999999999995e-06, "loss": 0.1365, "step": 1801 }, { "epoch": 3.325023084025854, "grad_norm": 1.244110345840454, "learning_rate": 5.397e-06, "loss": 0.1279, "step": 1802 }, { "epoch": 3.326869806094183, "grad_norm": 2.9739558696746826, "learning_rate": 5.4e-06, "loss": 0.135, "step": 1803 }, { "epoch": 
3.3287165281625115, "grad_norm": 1.8225957155227661, "learning_rate": 5.403000000000001e-06, "loss": 0.1005, "step": 1804 }, { "epoch": 3.33056325023084, "grad_norm": 1.5515562295913696, "learning_rate": 5.406e-06, "loss": 0.1708, "step": 1805 }, { "epoch": 3.332409972299169, "grad_norm": 1.735154151916504, "learning_rate": 5.408999999999999e-06, "loss": 0.1091, "step": 1806 }, { "epoch": 3.3342566943674976, "grad_norm": 1.587384819984436, "learning_rate": 5.412e-06, "loss": 0.1558, "step": 1807 }, { "epoch": 3.3361034164358263, "grad_norm": 1.638647198677063, "learning_rate": 5.415e-06, "loss": 0.1064, "step": 1808 }, { "epoch": 3.337950138504155, "grad_norm": 1.2404173612594604, "learning_rate": 5.4180000000000005e-06, "loss": 0.1065, "step": 1809 }, { "epoch": 3.3397968605724837, "grad_norm": 3.201526165008545, "learning_rate": 5.421e-06, "loss": 0.1334, "step": 1810 }, { "epoch": 3.3416435826408124, "grad_norm": 2.358724355697632, "learning_rate": 5.424e-06, "loss": 0.134, "step": 1811 }, { "epoch": 3.343490304709141, "grad_norm": 1.3843193054199219, "learning_rate": 5.427e-06, "loss": 0.0946, "step": 1812 }, { "epoch": 3.34533702677747, "grad_norm": 2.5044610500335693, "learning_rate": 5.43e-06, "loss": 0.1552, "step": 1813 }, { "epoch": 3.347183748845799, "grad_norm": 1.6049011945724487, "learning_rate": 5.433e-06, "loss": 0.154, "step": 1814 }, { "epoch": 3.349030470914127, "grad_norm": 1.7652519941329956, "learning_rate": 5.436e-06, "loss": 0.1191, "step": 1815 }, { "epoch": 3.3508771929824563, "grad_norm": 1.646363615989685, "learning_rate": 5.439e-06, "loss": 0.1366, "step": 1816 }, { "epoch": 3.352723915050785, "grad_norm": 2.438486337661743, "learning_rate": 5.442000000000001e-06, "loss": 0.1325, "step": 1817 }, { "epoch": 3.3545706371191137, "grad_norm": 1.6104596853256226, "learning_rate": 5.445e-06, "loss": 0.1316, "step": 1818 }, { "epoch": 3.3564173591874424, "grad_norm": 1.6542346477508545, "learning_rate": 5.448e-06, "loss": 0.1435, "step": 1819 
}, { "epoch": 3.358264081255771, "grad_norm": 1.6384179592132568, "learning_rate": 5.451e-06, "loss": 0.1408, "step": 1820 }, { "epoch": 3.3601108033241, "grad_norm": 2.035884380340576, "learning_rate": 5.454e-06, "loss": 0.1251, "step": 1821 }, { "epoch": 3.3619575253924285, "grad_norm": 2.1199872493743896, "learning_rate": 5.4570000000000004e-06, "loss": 0.1318, "step": 1822 }, { "epoch": 3.3638042474607572, "grad_norm": 2.0500221252441406, "learning_rate": 5.46e-06, "loss": 0.1293, "step": 1823 }, { "epoch": 3.365650969529086, "grad_norm": 1.677475094795227, "learning_rate": 5.463000000000001e-06, "loss": 0.2024, "step": 1824 }, { "epoch": 3.3674976915974146, "grad_norm": 1.9693593978881836, "learning_rate": 5.466e-06, "loss": 0.1779, "step": 1825 }, { "epoch": 3.3693444136657433, "grad_norm": 1.5180773735046387, "learning_rate": 5.469e-06, "loss": 0.1339, "step": 1826 }, { "epoch": 3.371191135734072, "grad_norm": 2.3797709941864014, "learning_rate": 5.472e-06, "loss": 0.589, "step": 1827 }, { "epoch": 3.3730378578024007, "grad_norm": 2.1648378372192383, "learning_rate": 5.475e-06, "loss": 0.5639, "step": 1828 }, { "epoch": 3.3748845798707294, "grad_norm": 1.9987053871154785, "learning_rate": 5.478000000000001e-06, "loss": 0.4686, "step": 1829 }, { "epoch": 3.376731301939058, "grad_norm": 2.010835886001587, "learning_rate": 5.4810000000000005e-06, "loss": 0.4747, "step": 1830 }, { "epoch": 3.378578024007387, "grad_norm": 1.4874591827392578, "learning_rate": 5.4839999999999995e-06, "loss": 0.3482, "step": 1831 }, { "epoch": 3.3804247460757155, "grad_norm": 1.4924468994140625, "learning_rate": 5.487e-06, "loss": 0.3022, "step": 1832 }, { "epoch": 3.3822714681440442, "grad_norm": 1.5955424308776855, "learning_rate": 5.49e-06, "loss": 0.3013, "step": 1833 }, { "epoch": 3.384118190212373, "grad_norm": 1.8055914640426636, "learning_rate": 5.493000000000001e-06, "loss": 0.4244, "step": 1834 }, { "epoch": 3.3859649122807016, "grad_norm": 1.6258183717727661, 
"learning_rate": 5.496e-06, "loss": 0.3016, "step": 1835 }, { "epoch": 3.3878116343490303, "grad_norm": 1.7717417478561401, "learning_rate": 5.499e-06, "loss": 0.3108, "step": 1836 }, { "epoch": 3.389658356417359, "grad_norm": 2.136167049407959, "learning_rate": 5.502e-06, "loss": 0.2754, "step": 1837 }, { "epoch": 3.3915050784856877, "grad_norm": 1.1560344696044922, "learning_rate": 5.505e-06, "loss": 0.193, "step": 1838 }, { "epoch": 3.393351800554017, "grad_norm": 1.2215458154678345, "learning_rate": 5.5080000000000005e-06, "loss": 0.1924, "step": 1839 }, { "epoch": 3.395198522622345, "grad_norm": 1.3697618246078491, "learning_rate": 5.511e-06, "loss": 0.1704, "step": 1840 }, { "epoch": 3.3970452446906743, "grad_norm": 1.5075842142105103, "learning_rate": 5.514e-06, "loss": 0.1752, "step": 1841 }, { "epoch": 3.398891966759003, "grad_norm": 1.186893343925476, "learning_rate": 5.517000000000001e-06, "loss": 0.1683, "step": 1842 }, { "epoch": 3.4007386888273317, "grad_norm": 1.2870360612869263, "learning_rate": 5.52e-06, "loss": 0.1104, "step": 1843 }, { "epoch": 3.4025854108956604, "grad_norm": 1.3535423278808594, "learning_rate": 5.523e-06, "loss": 0.1625, "step": 1844 }, { "epoch": 3.404432132963989, "grad_norm": 0.9633287191390991, "learning_rate": 5.526e-06, "loss": 0.1042, "step": 1845 }, { "epoch": 3.4062788550323178, "grad_norm": 1.3369865417480469, "learning_rate": 5.529e-06, "loss": 0.1994, "step": 1846 }, { "epoch": 3.4081255771006465, "grad_norm": 1.342253565788269, "learning_rate": 5.5320000000000006e-06, "loss": 0.1389, "step": 1847 }, { "epoch": 3.409972299168975, "grad_norm": 2.2483065128326416, "learning_rate": 5.535e-06, "loss": 0.1297, "step": 1848 }, { "epoch": 3.411819021237304, "grad_norm": 1.504280686378479, "learning_rate": 5.537999999999999e-06, "loss": 0.1155, "step": 1849 }, { "epoch": 3.4136657433056325, "grad_norm": 1.8840082883834839, "learning_rate": 5.541e-06, "loss": 0.1765, "step": 1850 }, { "epoch": 3.4155124653739612, 
"grad_norm": 1.5378448963165283, "learning_rate": 5.544e-06, "loss": 0.1434, "step": 1851 }, { "epoch": 3.41735918744229, "grad_norm": 1.1429051160812378, "learning_rate": 5.547e-06, "loss": 0.1191, "step": 1852 }, { "epoch": 3.4192059095106186, "grad_norm": 1.4453113079071045, "learning_rate": 5.55e-06, "loss": 0.089, "step": 1853 }, { "epoch": 3.4210526315789473, "grad_norm": 1.3558111190795898, "learning_rate": 5.553e-06, "loss": 0.1171, "step": 1854 }, { "epoch": 3.422899353647276, "grad_norm": 3.135983943939209, "learning_rate": 5.556e-06, "loss": 0.1268, "step": 1855 }, { "epoch": 3.4247460757156047, "grad_norm": 1.6183189153671265, "learning_rate": 5.559e-06, "loss": 0.119, "step": 1856 }, { "epoch": 3.4265927977839334, "grad_norm": 1.4396922588348389, "learning_rate": 5.562e-06, "loss": 0.1557, "step": 1857 }, { "epoch": 3.428439519852262, "grad_norm": 1.64681077003479, "learning_rate": 5.565e-06, "loss": 0.0944, "step": 1858 }, { "epoch": 3.430286241920591, "grad_norm": 1.7854353189468384, "learning_rate": 5.568e-06, "loss": 0.1203, "step": 1859 }, { "epoch": 3.4321329639889195, "grad_norm": 1.3215970993041992, "learning_rate": 5.5710000000000005e-06, "loss": 0.0962, "step": 1860 }, { "epoch": 3.4339796860572482, "grad_norm": 1.485531210899353, "learning_rate": 5.574e-06, "loss": 0.1415, "step": 1861 }, { "epoch": 3.435826408125577, "grad_norm": 1.915353775024414, "learning_rate": 5.577e-06, "loss": 0.1219, "step": 1862 }, { "epoch": 3.4376731301939056, "grad_norm": 5.033260345458984, "learning_rate": 5.58e-06, "loss": 0.1146, "step": 1863 }, { "epoch": 3.439519852262235, "grad_norm": 1.7047919034957886, "learning_rate": 5.583e-06, "loss": 0.1072, "step": 1864 }, { "epoch": 3.441366574330563, "grad_norm": 1.481224536895752, "learning_rate": 5.586e-06, "loss": 0.1166, "step": 1865 }, { "epoch": 3.443213296398892, "grad_norm": 1.5594127178192139, "learning_rate": 5.589e-06, "loss": 0.1386, "step": 1866 }, { "epoch": 3.445060018467221, "grad_norm": 
1.7530744075775146, "learning_rate": 5.592000000000001e-06, "loss": 0.1145, "step": 1867 }, { "epoch": 3.4469067405355496, "grad_norm": 1.3681082725524902, "learning_rate": 5.595e-06, "loss": 0.1197, "step": 1868 }, { "epoch": 3.4487534626038783, "grad_norm": 1.802425503730774, "learning_rate": 5.598e-06, "loss": 0.1206, "step": 1869 }, { "epoch": 3.450600184672207, "grad_norm": 1.789586067199707, "learning_rate": 5.601e-06, "loss": 0.1961, "step": 1870 }, { "epoch": 3.4524469067405357, "grad_norm": 1.5348275899887085, "learning_rate": 5.604e-06, "loss": 0.1543, "step": 1871 }, { "epoch": 3.4542936288088644, "grad_norm": 1.5526305437088013, "learning_rate": 5.607000000000001e-06, "loss": 0.1006, "step": 1872 }, { "epoch": 3.456140350877193, "grad_norm": 4.2244415283203125, "learning_rate": 5.6100000000000005e-06, "loss": 0.1398, "step": 1873 }, { "epoch": 3.4579870729455218, "grad_norm": 2.305570602416992, "learning_rate": 5.6129999999999995e-06, "loss": 0.1257, "step": 1874 }, { "epoch": 3.4598337950138505, "grad_norm": 1.7376497983932495, "learning_rate": 5.616e-06, "loss": 0.1504, "step": 1875 }, { "epoch": 3.461680517082179, "grad_norm": 3.7213504314422607, "learning_rate": 5.619e-06, "loss": 0.2539, "step": 1876 }, { "epoch": 3.463527239150508, "grad_norm": 3.0193634033203125, "learning_rate": 5.6220000000000006e-06, "loss": 0.6193, "step": 1877 }, { "epoch": 3.4653739612188366, "grad_norm": 2.032485008239746, "learning_rate": 5.625e-06, "loss": 0.3857, "step": 1878 }, { "epoch": 3.4672206832871653, "grad_norm": 1.407131314277649, "learning_rate": 5.628e-06, "loss": 0.455, "step": 1879 }, { "epoch": 3.469067405355494, "grad_norm": 1.7298333644866943, "learning_rate": 5.631e-06, "loss": 0.4029, "step": 1880 }, { "epoch": 3.4709141274238227, "grad_norm": 1.9133267402648926, "learning_rate": 5.634e-06, "loss": 0.438, "step": 1881 }, { "epoch": 3.4727608494921514, "grad_norm": 1.6703765392303467, "learning_rate": 5.637e-06, "loss": 0.4298, "step": 1882 }, { 
"epoch": 3.47460757156048, "grad_norm": 1.2871016263961792, "learning_rate": 5.64e-06, "loss": 0.3063, "step": 1883 }, { "epoch": 3.4764542936288088, "grad_norm": 2.3849496841430664, "learning_rate": 5.643e-06, "loss": 0.4411, "step": 1884 }, { "epoch": 3.4783010156971375, "grad_norm": 1.4804643392562866, "learning_rate": 5.646000000000001e-06, "loss": 0.27, "step": 1885 }, { "epoch": 3.480147737765466, "grad_norm": 2.092325210571289, "learning_rate": 5.649e-06, "loss": 0.2392, "step": 1886 }, { "epoch": 3.481994459833795, "grad_norm": 1.2303647994995117, "learning_rate": 5.652e-06, "loss": 0.1759, "step": 1887 }, { "epoch": 3.4838411819021236, "grad_norm": 1.8435465097427368, "learning_rate": 5.655e-06, "loss": 0.2126, "step": 1888 }, { "epoch": 3.4856879039704527, "grad_norm": 1.3646825551986694, "learning_rate": 5.658e-06, "loss": 0.213, "step": 1889 }, { "epoch": 3.487534626038781, "grad_norm": 1.2749691009521484, "learning_rate": 5.6610000000000005e-06, "loss": 0.1466, "step": 1890 }, { "epoch": 3.48938134810711, "grad_norm": 1.3183910846710205, "learning_rate": 5.664e-06, "loss": 0.1894, "step": 1891 }, { "epoch": 3.4912280701754383, "grad_norm": 1.5395432710647583, "learning_rate": 5.667e-06, "loss": 0.1734, "step": 1892 }, { "epoch": 3.4930747922437675, "grad_norm": 2.1787004470825195, "learning_rate": 5.67e-06, "loss": 0.1379, "step": 1893 }, { "epoch": 3.494921514312096, "grad_norm": 1.3665242195129395, "learning_rate": 5.673e-06, "loss": 0.1335, "step": 1894 }, { "epoch": 3.496768236380425, "grad_norm": 5.0550103187561035, "learning_rate": 5.676e-06, "loss": 0.1146, "step": 1895 }, { "epoch": 3.4986149584487536, "grad_norm": 2.3658273220062256, "learning_rate": 5.679e-06, "loss": 0.1421, "step": 1896 }, { "epoch": 3.5004616805170823, "grad_norm": 1.0035806894302368, "learning_rate": 5.682000000000001e-06, "loss": 0.1141, "step": 1897 }, { "epoch": 3.502308402585411, "grad_norm": 6.410629749298096, "learning_rate": 5.685e-06, "loss": 0.1171, "step": 1898 
}, { "epoch": 3.5041551246537397, "grad_norm": 1.1340585947036743, "learning_rate": 5.688e-06, "loss": 0.1112, "step": 1899 }, { "epoch": 3.5060018467220684, "grad_norm": 1.2341041564941406, "learning_rate": 5.691e-06, "loss": 0.129, "step": 1900 }, { "epoch": 3.507848568790397, "grad_norm": 2.3341636657714844, "learning_rate": 5.694e-06, "loss": 0.116, "step": 1901 }, { "epoch": 3.509695290858726, "grad_norm": 1.6923784017562866, "learning_rate": 5.697000000000001e-06, "loss": 0.1143, "step": 1902 }, { "epoch": 3.5115420129270545, "grad_norm": 2.5729382038116455, "learning_rate": 5.7000000000000005e-06, "loss": 0.1099, "step": 1903 }, { "epoch": 3.513388734995383, "grad_norm": 1.2444424629211426, "learning_rate": 5.703e-06, "loss": 0.1303, "step": 1904 }, { "epoch": 3.515235457063712, "grad_norm": 1.2153400182724, "learning_rate": 5.706e-06, "loss": 0.0941, "step": 1905 }, { "epoch": 3.5170821791320406, "grad_norm": 1.57122004032135, "learning_rate": 5.709e-06, "loss": 0.1996, "step": 1906 }, { "epoch": 3.5189289012003693, "grad_norm": 1.2429941892623901, "learning_rate": 5.7120000000000005e-06, "loss": 0.1322, "step": 1907 }, { "epoch": 3.520775623268698, "grad_norm": 1.3662152290344238, "learning_rate": 5.715e-06, "loss": 0.0835, "step": 1908 }, { "epoch": 3.5226223453370267, "grad_norm": 4.294366359710693, "learning_rate": 5.718e-06, "loss": 0.1686, "step": 1909 }, { "epoch": 3.5244690674053554, "grad_norm": 2.29258394241333, "learning_rate": 5.721000000000001e-06, "loss": 0.1044, "step": 1910 }, { "epoch": 3.526315789473684, "grad_norm": 1.248388648033142, "learning_rate": 5.724e-06, "loss": 0.0925, "step": 1911 }, { "epoch": 3.5281625115420128, "grad_norm": 1.540623426437378, "learning_rate": 5.7269999999999995e-06, "loss": 0.1123, "step": 1912 }, { "epoch": 3.5300092336103415, "grad_norm": 2.4571754932403564, "learning_rate": 5.73e-06, "loss": 0.1247, "step": 1913 }, { "epoch": 3.5318559556786706, "grad_norm": 1.2694852352142334, "learning_rate": 5.733e-06, 
"loss": 0.1746, "step": 1914 }, { "epoch": 3.533702677746999, "grad_norm": 1.9375823736190796, "learning_rate": 5.736000000000001e-06, "loss": 0.1105, "step": 1915 }, { "epoch": 3.535549399815328, "grad_norm": 1.210279107093811, "learning_rate": 5.7390000000000004e-06, "loss": 0.1333, "step": 1916 }, { "epoch": 3.5373961218836563, "grad_norm": 1.4333395957946777, "learning_rate": 5.741999999999999e-06, "loss": 0.1251, "step": 1917 }, { "epoch": 3.5392428439519854, "grad_norm": 1.5113213062286377, "learning_rate": 5.745e-06, "loss": 0.105, "step": 1918 }, { "epoch": 3.541089566020314, "grad_norm": 1.5336154699325562, "learning_rate": 5.748e-06, "loss": 0.1238, "step": 1919 }, { "epoch": 3.542936288088643, "grad_norm": 1.4925528764724731, "learning_rate": 5.7510000000000005e-06, "loss": 0.1156, "step": 1920 }, { "epoch": 3.5447830101569715, "grad_norm": 1.5090351104736328, "learning_rate": 5.754e-06, "loss": 0.1149, "step": 1921 }, { "epoch": 3.5466297322253, "grad_norm": 1.419166922569275, "learning_rate": 5.757e-06, "loss": 0.1477, "step": 1922 }, { "epoch": 3.548476454293629, "grad_norm": 1.3856487274169922, "learning_rate": 5.76e-06, "loss": 0.1166, "step": 1923 }, { "epoch": 3.5503231763619576, "grad_norm": 1.7472511529922485, "learning_rate": 5.763e-06, "loss": 0.146, "step": 1924 }, { "epoch": 3.5521698984302863, "grad_norm": 1.4180855751037598, "learning_rate": 5.766e-06, "loss": 0.1507, "step": 1925 }, { "epoch": 3.554016620498615, "grad_norm": 2.02449107170105, "learning_rate": 5.769e-06, "loss": 0.1547, "step": 1926 }, { "epoch": 3.5558633425669437, "grad_norm": 4.551610469818115, "learning_rate": 5.772e-06, "loss": 0.5175, "step": 1927 }, { "epoch": 3.5577100646352724, "grad_norm": 1.604357123374939, "learning_rate": 5.775000000000001e-06, "loss": 0.5595, "step": 1928 }, { "epoch": 3.559556786703601, "grad_norm": 1.855041265487671, "learning_rate": 5.7779999999999996e-06, "loss": 0.4088, "step": 1929 }, { "epoch": 3.56140350877193, "grad_norm": 
1.8530044555664062, "learning_rate": 5.781e-06, "loss": 0.3441, "step": 1930 }, { "epoch": 3.5632502308402585, "grad_norm": 2.204385280609131, "learning_rate": 5.784e-06, "loss": 0.5021, "step": 1931 }, { "epoch": 3.565096952908587, "grad_norm": 2.2241737842559814, "learning_rate": 5.787e-06, "loss": 0.3674, "step": 1932 }, { "epoch": 3.566943674976916, "grad_norm": 1.4868899583816528, "learning_rate": 5.7900000000000005e-06, "loss": 0.297, "step": 1933 }, { "epoch": 3.5687903970452446, "grad_norm": 2.145841121673584, "learning_rate": 5.793e-06, "loss": 0.292, "step": 1934 }, { "epoch": 3.5706371191135733, "grad_norm": 1.8769104480743408, "learning_rate": 5.796e-06, "loss": 0.263, "step": 1935 }, { "epoch": 3.572483841181902, "grad_norm": 4.806507110595703, "learning_rate": 5.799e-06, "loss": 0.3297, "step": 1936 }, { "epoch": 3.5743305632502307, "grad_norm": 1.6606978178024292, "learning_rate": 5.802e-06, "loss": 0.2308, "step": 1937 }, { "epoch": 3.5761772853185594, "grad_norm": 1.4710232019424438, "learning_rate": 5.805e-06, "loss": 0.2831, "step": 1938 }, { "epoch": 3.5780240073868885, "grad_norm": 2.6567530632019043, "learning_rate": 5.808e-06, "loss": 0.2106, "step": 1939 }, { "epoch": 3.579870729455217, "grad_norm": 1.5181427001953125, "learning_rate": 5.811000000000001e-06, "loss": 0.2107, "step": 1940 }, { "epoch": 3.581717451523546, "grad_norm": 0.9983479976654053, "learning_rate": 5.814e-06, "loss": 0.1584, "step": 1941 }, { "epoch": 3.583564173591874, "grad_norm": 1.196229338645935, "learning_rate": 5.8169999999999995e-06, "loss": 0.1357, "step": 1942 }, { "epoch": 3.5854108956602033, "grad_norm": 1.343436598777771, "learning_rate": 5.82e-06, "loss": 0.1194, "step": 1943 }, { "epoch": 3.587257617728532, "grad_norm": 1.8224588632583618, "learning_rate": 5.823e-06, "loss": 0.1649, "step": 1944 }, { "epoch": 3.5891043397968607, "grad_norm": 1.064284324645996, "learning_rate": 5.826000000000001e-06, "loss": 0.106, "step": 1945 }, { "epoch": 
3.5909510618651894, "grad_norm": 1.1194217205047607, "learning_rate": 5.8290000000000004e-06, "loss": 0.1177, "step": 1946 }, { "epoch": 3.592797783933518, "grad_norm": 1.6041698455810547, "learning_rate": 5.832e-06, "loss": 0.133, "step": 1947 }, { "epoch": 3.594644506001847, "grad_norm": 1.0496641397476196, "learning_rate": 5.835e-06, "loss": 0.0764, "step": 1948 }, { "epoch": 3.5964912280701755, "grad_norm": 1.0636441707611084, "learning_rate": 5.838e-06, "loss": 0.1288, "step": 1949 }, { "epoch": 3.598337950138504, "grad_norm": 1.3821836709976196, "learning_rate": 5.8410000000000005e-06, "loss": 0.1299, "step": 1950 }, { "epoch": 3.600184672206833, "grad_norm": 1.1364189386367798, "learning_rate": 5.844e-06, "loss": 0.1114, "step": 1951 }, { "epoch": 3.6020313942751616, "grad_norm": 1.2574251890182495, "learning_rate": 5.847e-06, "loss": 0.1286, "step": 1952 }, { "epoch": 3.6038781163434903, "grad_norm": 1.209403157234192, "learning_rate": 5.850000000000001e-06, "loss": 0.1498, "step": 1953 }, { "epoch": 3.605724838411819, "grad_norm": 1.3319051265716553, "learning_rate": 5.853e-06, "loss": 0.1073, "step": 1954 }, { "epoch": 3.6075715604801477, "grad_norm": 1.2135307788848877, "learning_rate": 5.856e-06, "loss": 0.106, "step": 1955 }, { "epoch": 3.6094182825484764, "grad_norm": 1.4304325580596924, "learning_rate": 5.859e-06, "loss": 0.1141, "step": 1956 }, { "epoch": 3.611265004616805, "grad_norm": 5.753938674926758, "learning_rate": 5.862e-06, "loss": 0.1105, "step": 1957 }, { "epoch": 3.613111726685134, "grad_norm": 2.1282248497009277, "learning_rate": 5.865000000000001e-06, "loss": 0.1175, "step": 1958 }, { "epoch": 3.6149584487534625, "grad_norm": 1.8825041055679321, "learning_rate": 5.868e-06, "loss": 0.1184, "step": 1959 }, { "epoch": 3.616805170821791, "grad_norm": 1.0043890476226807, "learning_rate": 5.871e-06, "loss": 0.0976, "step": 1960 }, { "epoch": 3.61865189289012, "grad_norm": 2.1982343196868896, "learning_rate": 5.874e-06, "loss": 0.0953, 
"step": 1961 }, { "epoch": 3.6204986149584486, "grad_norm": 1.6441147327423096, "learning_rate": 5.877e-06, "loss": 0.1208, "step": 1962 }, { "epoch": 3.6223453370267773, "grad_norm": 1.4551324844360352, "learning_rate": 5.8800000000000005e-06, "loss": 0.1104, "step": 1963 }, { "epoch": 3.6241920590951064, "grad_norm": 1.2573660612106323, "learning_rate": 5.883e-06, "loss": 0.1104, "step": 1964 }, { "epoch": 3.6260387811634347, "grad_norm": 1.6478545665740967, "learning_rate": 5.886000000000001e-06, "loss": 0.1119, "step": 1965 }, { "epoch": 3.627885503231764, "grad_norm": 1.328403115272522, "learning_rate": 5.889e-06, "loss": 0.1182, "step": 1966 }, { "epoch": 3.629732225300092, "grad_norm": 2.832441806793213, "learning_rate": 5.892e-06, "loss": 0.1433, "step": 1967 }, { "epoch": 3.6315789473684212, "grad_norm": 1.5062068700790405, "learning_rate": 5.895e-06, "loss": 0.0838, "step": 1968 }, { "epoch": 3.6334256694367495, "grad_norm": 1.5999783277511597, "learning_rate": 5.898e-06, "loss": 0.0968, "step": 1969 }, { "epoch": 3.6352723915050786, "grad_norm": 1.426970362663269, "learning_rate": 5.901000000000001e-06, "loss": 0.1154, "step": 1970 }, { "epoch": 3.6371191135734073, "grad_norm": 3.131009340286255, "learning_rate": 5.9040000000000006e-06, "loss": 0.1042, "step": 1971 }, { "epoch": 3.638965835641736, "grad_norm": 1.6253173351287842, "learning_rate": 5.9069999999999995e-06, "loss": 0.1047, "step": 1972 }, { "epoch": 3.6408125577100647, "grad_norm": 1.677268385887146, "learning_rate": 5.91e-06, "loss": 0.115, "step": 1973 }, { "epoch": 3.6426592797783934, "grad_norm": 2.1825778484344482, "learning_rate": 5.913e-06, "loss": 0.1139, "step": 1974 }, { "epoch": 3.644506001846722, "grad_norm": 2.2541301250457764, "learning_rate": 5.916e-06, "loss": 0.1551, "step": 1975 }, { "epoch": 3.646352723915051, "grad_norm": 1.7591955661773682, "learning_rate": 5.919e-06, "loss": 0.2356, "step": 1976 }, { "epoch": 3.6481994459833795, "grad_norm": 2.3737945556640625, 
"learning_rate": 5.922e-06, "loss": 0.5899, "step": 1977 }, { "epoch": 3.6500461680517082, "grad_norm": 1.9221827983856201, "learning_rate": 5.925e-06, "loss": 0.4634, "step": 1978 }, { "epoch": 3.651892890120037, "grad_norm": 2.4015867710113525, "learning_rate": 5.928e-06, "loss": 0.4463, "step": 1979 }, { "epoch": 3.6537396121883656, "grad_norm": 4.889469146728516, "learning_rate": 5.931e-06, "loss": 0.426, "step": 1980 }, { "epoch": 3.6555863342566943, "grad_norm": 2.2621586322784424, "learning_rate": 5.934e-06, "loss": 0.4606, "step": 1981 }, { "epoch": 3.657433056325023, "grad_norm": 1.894276738166809, "learning_rate": 5.937e-06, "loss": 0.3655, "step": 1982 }, { "epoch": 3.6592797783933517, "grad_norm": 3.954047203063965, "learning_rate": 5.940000000000001e-06, "loss": 0.3543, "step": 1983 }, { "epoch": 3.6611265004616804, "grad_norm": 1.4562526941299438, "learning_rate": 5.943e-06, "loss": 0.3325, "step": 1984 }, { "epoch": 3.662973222530009, "grad_norm": 2.2910425662994385, "learning_rate": 5.9459999999999995e-06, "loss": 0.3744, "step": 1985 }, { "epoch": 3.664819944598338, "grad_norm": 1.5386607646942139, "learning_rate": 5.949e-06, "loss": 0.2899, "step": 1986 }, { "epoch": 3.6666666666666665, "grad_norm": 1.333303451538086, "learning_rate": 5.952e-06, "loss": 0.2228, "step": 1987 }, { "epoch": 3.668513388734995, "grad_norm": 3.049942970275879, "learning_rate": 5.955000000000001e-06, "loss": 0.2197, "step": 1988 }, { "epoch": 3.6703601108033244, "grad_norm": 2.74458384513855, "learning_rate": 5.958e-06, "loss": 0.2569, "step": 1989 }, { "epoch": 3.6722068328716526, "grad_norm": 1.1316044330596924, "learning_rate": 5.961e-06, "loss": 0.1435, "step": 1990 }, { "epoch": 3.6740535549399818, "grad_norm": 1.435760498046875, "learning_rate": 5.964e-06, "loss": 0.165, "step": 1991 }, { "epoch": 3.67590027700831, "grad_norm": 1.0853261947631836, "learning_rate": 5.967e-06, "loss": 0.0853, "step": 1992 }, { "epoch": 3.677746999076639, "grad_norm": 
1.67013418674469, "learning_rate": 5.9700000000000004e-06, "loss": 0.2136, "step": 1993 }, { "epoch": 3.6795937211449674, "grad_norm": 1.1271677017211914, "learning_rate": 5.973e-06, "loss": 0.0932, "step": 1994 }, { "epoch": 3.6814404432132966, "grad_norm": 1.1513522863388062, "learning_rate": 5.976e-06, "loss": 0.1335, "step": 1995 }, { "epoch": 3.6832871652816253, "grad_norm": 1.074549674987793, "learning_rate": 5.979000000000001e-06, "loss": 0.0735, "step": 1996 }, { "epoch": 3.685133887349954, "grad_norm": 1.177593469619751, "learning_rate": 5.982e-06, "loss": 0.2005, "step": 1997 }, { "epoch": 3.6869806094182827, "grad_norm": 1.4211703538894653, "learning_rate": 5.985e-06, "loss": 0.143, "step": 1998 }, { "epoch": 3.6888273314866113, "grad_norm": 1.6892846822738647, "learning_rate": 5.988e-06, "loss": 0.1164, "step": 1999 }, { "epoch": 3.69067405355494, "grad_norm": 1.2731127738952637, "learning_rate": 5.991e-06, "loss": 0.092, "step": 2000 }, { "epoch": 3.69067405355494, "eval_cer": 0.15273349405085931, "eval_loss": 0.5228983163833618, "eval_runtime": 15.933, "eval_samples_per_second": 19.08, "eval_steps_per_second": 0.628, "eval_wer": 0.5397160399079048, "step": 2000 }, { "epoch": 3.6925207756232687, "grad_norm": 1.0816723108291626, "learning_rate": 5.9940000000000005e-06, "loss": 0.1102, "step": 2001 }, { "epoch": 3.6943674976915974, "grad_norm": 1.175575852394104, "learning_rate": 5.997e-06, "loss": 0.1103, "step": 2002 }, { "epoch": 3.696214219759926, "grad_norm": 1.5876294374465942, "learning_rate": 6e-06, "loss": 0.1206, "step": 2003 }, { "epoch": 3.698060941828255, "grad_norm": 1.339050054550171, "learning_rate": 6.003e-06, "loss": 0.0987, "step": 2004 }, { "epoch": 3.6999076638965835, "grad_norm": 2.4920217990875244, "learning_rate": 6.006e-06, "loss": 0.0947, "step": 2005 }, { "epoch": 3.7017543859649122, "grad_norm": 1.912536859512329, "learning_rate": 6.009e-06, "loss": 0.1419, "step": 2006 }, { "epoch": 3.703601108033241, "grad_norm": 
1.2025752067565918, "learning_rate": 6.012e-06, "loss": 0.1129, "step": 2007 }, { "epoch": 3.7054478301015696, "grad_norm": 3.4578535556793213, "learning_rate": 6.015000000000001e-06, "loss": 0.1331, "step": 2008 }, { "epoch": 3.7072945521698983, "grad_norm": 1.6533622741699219, "learning_rate": 6.018e-06, "loss": 0.1541, "step": 2009 }, { "epoch": 3.709141274238227, "grad_norm": 1.2792508602142334, "learning_rate": 6.021e-06, "loss": 0.0943, "step": 2010 }, { "epoch": 3.7109879963065557, "grad_norm": 1.6220040321350098, "learning_rate": 6.024e-06, "loss": 0.1202, "step": 2011 }, { "epoch": 3.7128347183748844, "grad_norm": 1.363448143005371, "learning_rate": 6.027e-06, "loss": 0.1145, "step": 2012 }, { "epoch": 3.714681440443213, "grad_norm": 1.4746122360229492, "learning_rate": 6.030000000000001e-06, "loss": 0.083, "step": 2013 }, { "epoch": 3.7165281625115423, "grad_norm": 1.3242028951644897, "learning_rate": 6.0330000000000005e-06, "loss": 0.0927, "step": 2014 }, { "epoch": 3.7183748845798705, "grad_norm": 1.3828983306884766, "learning_rate": 6.0359999999999995e-06, "loss": 0.112, "step": 2015 }, { "epoch": 3.7202216066481997, "grad_norm": 1.7886730432510376, "learning_rate": 6.039e-06, "loss": 0.1225, "step": 2016 }, { "epoch": 3.722068328716528, "grad_norm": 4.497995853424072, "learning_rate": 6.042e-06, "loss": 0.0945, "step": 2017 }, { "epoch": 3.723915050784857, "grad_norm": 1.2448220252990723, "learning_rate": 6.0450000000000006e-06, "loss": 0.0997, "step": 2018 }, { "epoch": 3.7257617728531853, "grad_norm": 1.299957513809204, "learning_rate": 6.048e-06, "loss": 0.1073, "step": 2019 }, { "epoch": 3.7276084949215145, "grad_norm": 1.221881628036499, "learning_rate": 6.051e-06, "loss": 0.1206, "step": 2020 }, { "epoch": 3.729455216989843, "grad_norm": 1.9921565055847168, "learning_rate": 6.054e-06, "loss": 0.1125, "step": 2021 }, { "epoch": 3.731301939058172, "grad_norm": 1.4520527124404907, "learning_rate": 6.057e-06, "loss": 0.0999, "step": 2022 }, { 
"epoch": 3.7331486611265006, "grad_norm": 1.3707276582717896, "learning_rate": 6.0600000000000004e-06, "loss": 0.1016, "step": 2023 }, { "epoch": 3.7349953831948293, "grad_norm": 1.359123945236206, "learning_rate": 6.063e-06, "loss": 0.1095, "step": 2024 }, { "epoch": 3.736842105263158, "grad_norm": 3.0324485301971436, "learning_rate": 6.066e-06, "loss": 0.143, "step": 2025 }, { "epoch": 3.7386888273314867, "grad_norm": 2.0628585815429688, "learning_rate": 6.069000000000001e-06, "loss": 0.1456, "step": 2026 }, { "epoch": 3.7405355493998154, "grad_norm": 5.545530319213867, "learning_rate": 6.072e-06, "loss": 0.5264, "step": 2027 }, { "epoch": 3.742382271468144, "grad_norm": 2.2468793392181396, "learning_rate": 6.075e-06, "loss": 0.542, "step": 2028 }, { "epoch": 3.7442289935364728, "grad_norm": 1.6987916231155396, "learning_rate": 6.078e-06, "loss": 0.3783, "step": 2029 }, { "epoch": 3.7460757156048015, "grad_norm": 1.487799882888794, "learning_rate": 6.081e-06, "loss": 0.426, "step": 2030 }, { "epoch": 3.74792243767313, "grad_norm": 1.8354527950286865, "learning_rate": 6.0840000000000005e-06, "loss": 0.3765, "step": 2031 }, { "epoch": 3.749769159741459, "grad_norm": 1.3133721351623535, "learning_rate": 6.087e-06, "loss": 0.2697, "step": 2032 }, { "epoch": 3.7516158818097876, "grad_norm": 1.655531406402588, "learning_rate": 6.090000000000001e-06, "loss": 0.4274, "step": 2033 }, { "epoch": 3.7534626038781163, "grad_norm": 1.0694929361343384, "learning_rate": 6.093e-06, "loss": 0.3198, "step": 2034 }, { "epoch": 3.755309325946445, "grad_norm": 1.8194829225540161, "learning_rate": 6.096e-06, "loss": 0.3143, "step": 2035 }, { "epoch": 3.7571560480147737, "grad_norm": 1.8747495412826538, "learning_rate": 6.099e-06, "loss": 0.2571, "step": 2036 }, { "epoch": 3.7590027700831024, "grad_norm": 1.4677729606628418, "learning_rate": 6.102e-06, "loss": 0.2237, "step": 2037 }, { "epoch": 3.760849492151431, "grad_norm": 1.2673132419586182, "learning_rate": 6.105e-06, "loss": 
0.2766, "step": 2038 }, { "epoch": 3.76269621421976, "grad_norm": 2.505880117416382, "learning_rate": 6.108000000000001e-06, "loss": 0.3313, "step": 2039 }, { "epoch": 3.7645429362880884, "grad_norm": 2.244453191757202, "learning_rate": 6.111e-06, "loss": 0.1818, "step": 2040 }, { "epoch": 3.7663896583564176, "grad_norm": 1.4449100494384766, "learning_rate": 6.114e-06, "loss": 0.1676, "step": 2041 }, { "epoch": 3.768236380424746, "grad_norm": 1.4164530038833618, "learning_rate": 6.117e-06, "loss": 0.128, "step": 2042 }, { "epoch": 3.770083102493075, "grad_norm": 1.2722538709640503, "learning_rate": 6.12e-06, "loss": 0.1526, "step": 2043 }, { "epoch": 3.7719298245614032, "grad_norm": 1.0496044158935547, "learning_rate": 6.1230000000000005e-06, "loss": 0.1081, "step": 2044 }, { "epoch": 3.7737765466297324, "grad_norm": 0.9792291522026062, "learning_rate": 6.126e-06, "loss": 0.1203, "step": 2045 }, { "epoch": 3.775623268698061, "grad_norm": 0.8785455822944641, "learning_rate": 6.129e-06, "loss": 0.0942, "step": 2046 }, { "epoch": 3.77746999076639, "grad_norm": 2.594404935836792, "learning_rate": 6.132e-06, "loss": 0.1138, "step": 2047 }, { "epoch": 3.7793167128347185, "grad_norm": 1.3891233205795288, "learning_rate": 6.135e-06, "loss": 0.1122, "step": 2048 }, { "epoch": 3.781163434903047, "grad_norm": 1.6807653903961182, "learning_rate": 6.138e-06, "loss": 0.1289, "step": 2049 }, { "epoch": 3.783010156971376, "grad_norm": 1.5894526243209839, "learning_rate": 6.141e-06, "loss": 0.1294, "step": 2050 }, { "epoch": 3.7848568790397046, "grad_norm": 1.5388537645339966, "learning_rate": 6.144000000000001e-06, "loss": 0.1347, "step": 2051 }, { "epoch": 3.7867036011080333, "grad_norm": 1.0728822946548462, "learning_rate": 6.147e-06, "loss": 0.0983, "step": 2052 }, { "epoch": 3.788550323176362, "grad_norm": 1.2530133724212646, "learning_rate": 6.1499999999999996e-06, "loss": 0.1288, "step": 2053 }, { "epoch": 3.7903970452446907, "grad_norm": 1.2521809339523315, "learning_rate": 
6.153e-06, "loss": 0.101, "step": 2054 }, { "epoch": 3.7922437673130194, "grad_norm": 1.032920002937317, "learning_rate": 6.156e-06, "loss": 0.0842, "step": 2055 }, { "epoch": 3.794090489381348, "grad_norm": 1.201108694076538, "learning_rate": 6.159000000000001e-06, "loss": 0.0904, "step": 2056 }, { "epoch": 3.7959372114496768, "grad_norm": 1.3609286546707153, "learning_rate": 6.1620000000000005e-06, "loss": 0.0745, "step": 2057 }, { "epoch": 3.7977839335180055, "grad_norm": 1.2280689477920532, "learning_rate": 6.164999999999999e-06, "loss": 0.1081, "step": 2058 }, { "epoch": 3.799630655586334, "grad_norm": 1.2103352546691895, "learning_rate": 6.168e-06, "loss": 0.1182, "step": 2059 }, { "epoch": 3.801477377654663, "grad_norm": 1.2055014371871948, "learning_rate": 6.171e-06, "loss": 0.1004, "step": 2060 }, { "epoch": 3.8033240997229916, "grad_norm": 1.1704323291778564, "learning_rate": 6.1740000000000005e-06, "loss": 0.1044, "step": 2061 }, { "epoch": 3.8051708217913203, "grad_norm": 1.461685061454773, "learning_rate": 6.177e-06, "loss": 0.0929, "step": 2062 }, { "epoch": 3.807017543859649, "grad_norm": 1.297804594039917, "learning_rate": 6.18e-06, "loss": 0.1194, "step": 2063 }, { "epoch": 3.808864265927978, "grad_norm": 2.321072816848755, "learning_rate": 6.183e-06, "loss": 0.1522, "step": 2064 }, { "epoch": 3.8107109879963064, "grad_norm": 1.7555663585662842, "learning_rate": 6.186e-06, "loss": 0.0904, "step": 2065 }, { "epoch": 3.8125577100646355, "grad_norm": 1.1725404262542725, "learning_rate": 6.189e-06, "loss": 0.1416, "step": 2066 }, { "epoch": 3.8144044321329638, "grad_norm": 1.6720110177993774, "learning_rate": 6.192e-06, "loss": 0.1405, "step": 2067 }, { "epoch": 3.816251154201293, "grad_norm": 1.382629632949829, "learning_rate": 6.195e-06, "loss": 0.0963, "step": 2068 }, { "epoch": 3.818097876269621, "grad_norm": 1.3463071584701538, "learning_rate": 6.198000000000001e-06, "loss": 0.0951, "step": 2069 }, { "epoch": 3.8199445983379503, "grad_norm": 
1.4168822765350342, "learning_rate": 6.201e-06, "loss": 0.0903, "step": 2070 }, { "epoch": 3.821791320406279, "grad_norm": 1.5806570053100586, "learning_rate": 6.204e-06, "loss": 0.0922, "step": 2071 }, { "epoch": 3.8236380424746077, "grad_norm": 1.9736270904541016, "learning_rate": 6.207e-06, "loss": 0.1037, "step": 2072 }, { "epoch": 3.8254847645429364, "grad_norm": 1.1318566799163818, "learning_rate": 6.21e-06, "loss": 0.0986, "step": 2073 }, { "epoch": 3.827331486611265, "grad_norm": 1.464217185974121, "learning_rate": 6.2130000000000005e-06, "loss": 0.1275, "step": 2074 }, { "epoch": 3.829178208679594, "grad_norm": 2.048211097717285, "learning_rate": 6.216e-06, "loss": 0.132, "step": 2075 }, { "epoch": 3.8310249307479225, "grad_norm": 2.4093711376190186, "learning_rate": 6.219000000000001e-06, "loss": 0.1212, "step": 2076 }, { "epoch": 3.832871652816251, "grad_norm": 1.6214300394058228, "learning_rate": 6.222e-06, "loss": 0.4919, "step": 2077 }, { "epoch": 3.83471837488458, "grad_norm": 1.7599939107894897, "learning_rate": 6.225e-06, "loss": 0.3767, "step": 2078 }, { "epoch": 3.8365650969529086, "grad_norm": 3.717496871948242, "learning_rate": 6.228e-06, "loss": 0.4679, "step": 2079 }, { "epoch": 3.8384118190212373, "grad_norm": 1.5492913722991943, "learning_rate": 6.231e-06, "loss": 0.4594, "step": 2080 }, { "epoch": 3.840258541089566, "grad_norm": 1.411643147468567, "learning_rate": 6.234000000000001e-06, "loss": 0.3923, "step": 2081 }, { "epoch": 3.8421052631578947, "grad_norm": 1.8489406108856201, "learning_rate": 6.237000000000001e-06, "loss": 0.2718, "step": 2082 }, { "epoch": 3.8439519852262234, "grad_norm": 1.3735231161117554, "learning_rate": 6.2399999999999995e-06, "loss": 0.3215, "step": 2083 }, { "epoch": 3.845798707294552, "grad_norm": 1.6795915365219116, "learning_rate": 6.243e-06, "loss": 0.2696, "step": 2084 }, { "epoch": 3.847645429362881, "grad_norm": 1.3095409870147705, "learning_rate": 6.246e-06, "loss": 0.3077, "step": 2085 }, { "epoch": 
3.8494921514312095, "grad_norm": 1.792952537536621, "learning_rate": 6.249000000000001e-06, "loss": 0.2491, "step": 2086 }, { "epoch": 3.851338873499538, "grad_norm": 1.0679808855056763, "learning_rate": 6.2520000000000004e-06, "loss": 0.2769, "step": 2087 }, { "epoch": 3.853185595567867, "grad_norm": 1.1066974401474, "learning_rate": 6.255e-06, "loss": 0.2073, "step": 2088 }, { "epoch": 3.855032317636196, "grad_norm": 2.68408465385437, "learning_rate": 6.258e-06, "loss": 0.3588, "step": 2089 }, { "epoch": 3.8568790397045243, "grad_norm": 5.881558895111084, "learning_rate": 6.261e-06, "loss": 0.2474, "step": 2090 }, { "epoch": 3.8587257617728534, "grad_norm": 1.1949398517608643, "learning_rate": 6.2640000000000005e-06, "loss": 0.1647, "step": 2091 }, { "epoch": 3.8605724838411817, "grad_norm": 2.1825714111328125, "learning_rate": 6.267e-06, "loss": 0.1624, "step": 2092 }, { "epoch": 3.862419205909511, "grad_norm": 4.6003546714782715, "learning_rate": 6.27e-06, "loss": 0.1243, "step": 2093 }, { "epoch": 3.864265927977839, "grad_norm": 1.0134408473968506, "learning_rate": 6.273000000000001e-06, "loss": 0.1175, "step": 2094 }, { "epoch": 3.866112650046168, "grad_norm": 1.549644112586975, "learning_rate": 6.276e-06, "loss": 0.0978, "step": 2095 }, { "epoch": 3.867959372114497, "grad_norm": 1.3444812297821045, "learning_rate": 6.279e-06, "loss": 0.1493, "step": 2096 }, { "epoch": 3.8698060941828256, "grad_norm": 1.0612382888793945, "learning_rate": 6.282e-06, "loss": 0.1069, "step": 2097 }, { "epoch": 3.8716528162511543, "grad_norm": 1.1147220134735107, "learning_rate": 6.285e-06, "loss": 0.1083, "step": 2098 }, { "epoch": 3.873499538319483, "grad_norm": 1.3620564937591553, "learning_rate": 6.288000000000001e-06, "loss": 0.0893, "step": 2099 }, { "epoch": 3.8753462603878117, "grad_norm": 3.2398104667663574, "learning_rate": 6.291e-06, "loss": 0.1036, "step": 2100 }, { "epoch": 3.8771929824561404, "grad_norm": 1.14120614528656, "learning_rate": 6.293999999999999e-06, 
"loss": 0.1057, "step": 2101 }, { "epoch": 3.879039704524469, "grad_norm": 1.2740801572799683, "learning_rate": 6.297e-06, "loss": 0.1245, "step": 2102 }, { "epoch": 3.880886426592798, "grad_norm": 1.1721112728118896, "learning_rate": 6.3e-06, "loss": 0.1275, "step": 2103 }, { "epoch": 3.8827331486611265, "grad_norm": 1.4239356517791748, "learning_rate": 6.3030000000000005e-06, "loss": 0.2154, "step": 2104 }, { "epoch": 3.884579870729455, "grad_norm": 1.0123012065887451, "learning_rate": 6.306e-06, "loss": 0.0966, "step": 2105 }, { "epoch": 3.886426592797784, "grad_norm": 1.8538200855255127, "learning_rate": 6.309e-06, "loss": 0.1693, "step": 2106 }, { "epoch": 3.8882733148661126, "grad_norm": 0.9299476742744446, "learning_rate": 6.312e-06, "loss": 0.0891, "step": 2107 }, { "epoch": 3.8901200369344413, "grad_norm": 1.3803143501281738, "learning_rate": 6.315e-06, "loss": 0.1548, "step": 2108 }, { "epoch": 3.89196675900277, "grad_norm": 1.0537395477294922, "learning_rate": 6.318e-06, "loss": 0.072, "step": 2109 }, { "epoch": 3.8938134810710987, "grad_norm": 1.2850028276443481, "learning_rate": 6.321e-06, "loss": 0.0819, "step": 2110 }, { "epoch": 3.8956602031394274, "grad_norm": 1.0578045845031738, "learning_rate": 6.324e-06, "loss": 0.1024, "step": 2111 }, { "epoch": 3.897506925207756, "grad_norm": 1.1398320198059082, "learning_rate": 6.327000000000001e-06, "loss": 0.0915, "step": 2112 }, { "epoch": 3.899353647276085, "grad_norm": 1.2906196117401123, "learning_rate": 6.3299999999999995e-06, "loss": 0.14, "step": 2113 }, { "epoch": 3.901200369344414, "grad_norm": 1.2287468910217285, "learning_rate": 6.333e-06, "loss": 0.0972, "step": 2114 }, { "epoch": 3.903047091412742, "grad_norm": 1.1388696432113647, "learning_rate": 6.336e-06, "loss": 0.0845, "step": 2115 }, { "epoch": 3.9048938134810713, "grad_norm": 1.6295071840286255, "learning_rate": 6.339e-06, "loss": 0.0994, "step": 2116 }, { "epoch": 3.9067405355493996, "grad_norm": 1.361403226852417, "learning_rate": 
6.3420000000000004e-06, "loss": 0.094, "step": 2117 }, { "epoch": 3.9085872576177287, "grad_norm": 1.7775616645812988, "learning_rate": 6.345e-06, "loss": 0.1288, "step": 2118 }, { "epoch": 3.910433979686057, "grad_norm": 1.6608407497406006, "learning_rate": 6.348000000000001e-06, "loss": 0.1267, "step": 2119 }, { "epoch": 3.912280701754386, "grad_norm": 2.0776705741882324, "learning_rate": 6.351e-06, "loss": 0.1085, "step": 2120 }, { "epoch": 3.914127423822715, "grad_norm": 1.467939853668213, "learning_rate": 6.354e-06, "loss": 0.114, "step": 2121 }, { "epoch": 3.9159741458910435, "grad_norm": 1.66560697555542, "learning_rate": 6.357e-06, "loss": 0.1166, "step": 2122 }, { "epoch": 3.9178208679593722, "grad_norm": 2.018527030944824, "learning_rate": 6.36e-06, "loss": 0.2089, "step": 2123 }, { "epoch": 3.919667590027701, "grad_norm": 1.6574705839157104, "learning_rate": 6.363000000000001e-06, "loss": 0.0896, "step": 2124 }, { "epoch": 3.9215143120960296, "grad_norm": 1.3335787057876587, "learning_rate": 6.3660000000000005e-06, "loss": 0.1261, "step": 2125 }, { "epoch": 3.9233610341643583, "grad_norm": 1.821389079093933, "learning_rate": 6.3689999999999995e-06, "loss": 0.1744, "step": 2126 }, { "epoch": 3.925207756232687, "grad_norm": 3.1028544902801514, "learning_rate": 6.372e-06, "loss": 0.5456, "step": 2127 }, { "epoch": 3.9270544783010157, "grad_norm": 1.4182378053665161, "learning_rate": 6.375e-06, "loss": 0.4202, "step": 2128 }, { "epoch": 3.9289012003693444, "grad_norm": 1.26549232006073, "learning_rate": 6.378000000000001e-06, "loss": 0.3366, "step": 2129 }, { "epoch": 3.930747922437673, "grad_norm": 1.560637354850769, "learning_rate": 6.381e-06, "loss": 0.3467, "step": 2130 }, { "epoch": 3.932594644506002, "grad_norm": 2.5093820095062256, "learning_rate": 6.384e-06, "loss": 0.4014, "step": 2131 }, { "epoch": 3.9344413665743305, "grad_norm": 1.0541409254074097, "learning_rate": 6.387e-06, "loss": 0.2948, "step": 2132 }, { "epoch": 3.936288088642659, 
"grad_norm": 1.5808500051498413, "learning_rate": 6.39e-06, "loss": 0.2711, "step": 2133 }, { "epoch": 3.938134810710988, "grad_norm": 1.3126081228256226, "learning_rate": 6.3930000000000005e-06, "loss": 0.2422, "step": 2134 }, { "epoch": 3.9399815327793166, "grad_norm": 1.0315762758255005, "learning_rate": 6.396e-06, "loss": 0.1848, "step": 2135 }, { "epoch": 3.9418282548476453, "grad_norm": 1.1984046697616577, "learning_rate": 6.399e-06, "loss": 0.1908, "step": 2136 }, { "epoch": 3.943674976915974, "grad_norm": 0.9420778155326843, "learning_rate": 6.402000000000001e-06, "loss": 0.1425, "step": 2137 }, { "epoch": 3.9455216989843027, "grad_norm": 1.1212126016616821, "learning_rate": 6.405e-06, "loss": 0.1209, "step": 2138 }, { "epoch": 3.9473684210526314, "grad_norm": 0.931004524230957, "learning_rate": 6.408e-06, "loss": 0.1815, "step": 2139 }, { "epoch": 3.94921514312096, "grad_norm": 0.990565836429596, "learning_rate": 6.411e-06, "loss": 0.0673, "step": 2140 }, { "epoch": 3.9510618651892893, "grad_norm": 1.6753158569335938, "learning_rate": 6.414e-06, "loss": 0.1226, "step": 2141 }, { "epoch": 3.9529085872576175, "grad_norm": 1.456681251525879, "learning_rate": 6.4170000000000006e-06, "loss": 0.1283, "step": 2142 }, { "epoch": 3.9547553093259467, "grad_norm": 1.3051668405532837, "learning_rate": 6.42e-06, "loss": 0.1209, "step": 2143 }, { "epoch": 3.956602031394275, "grad_norm": 1.251253604888916, "learning_rate": 6.423e-06, "loss": 0.0885, "step": 2144 }, { "epoch": 3.958448753462604, "grad_norm": 1.232637882232666, "learning_rate": 6.426e-06, "loss": 0.1096, "step": 2145 }, { "epoch": 3.9602954755309328, "grad_norm": 1.351613163948059, "learning_rate": 6.429e-06, "loss": 0.0987, "step": 2146 }, { "epoch": 3.9621421975992615, "grad_norm": 1.1840687990188599, "learning_rate": 6.432e-06, "loss": 0.0946, "step": 2147 }, { "epoch": 3.96398891966759, "grad_norm": 1.0928349494934082, "learning_rate": 6.435e-06, "loss": 0.1247, "step": 2148 }, { "epoch": 
3.965835641735919, "grad_norm": 1.0318615436553955, "learning_rate": 6.438000000000001e-06, "loss": 0.1022, "step": 2149 }, { "epoch": 3.9676823638042475, "grad_norm": 0.8724531531333923, "learning_rate": 6.441e-06, "loss": 0.0947, "step": 2150 }, { "epoch": 3.9695290858725762, "grad_norm": 1.2489190101623535, "learning_rate": 6.444e-06, "loss": 0.0858, "step": 2151 }, { "epoch": 3.971375807940905, "grad_norm": 1.392446517944336, "learning_rate": 6.447e-06, "loss": 0.0902, "step": 2152 }, { "epoch": 3.9732225300092336, "grad_norm": 1.2182376384735107, "learning_rate": 6.45e-06, "loss": 0.0871, "step": 2153 }, { "epoch": 3.9750692520775623, "grad_norm": 1.9293391704559326, "learning_rate": 6.453000000000001e-06, "loss": 0.1234, "step": 2154 }, { "epoch": 3.976915974145891, "grad_norm": 1.5359190702438354, "learning_rate": 6.4560000000000005e-06, "loss": 0.1224, "step": 2155 }, { "epoch": 3.9787626962142197, "grad_norm": 1.3025881052017212, "learning_rate": 6.4589999999999995e-06, "loss": 0.1664, "step": 2156 }, { "epoch": 3.9806094182825484, "grad_norm": 1.6642571687698364, "learning_rate": 6.462e-06, "loss": 0.0904, "step": 2157 }, { "epoch": 3.982456140350877, "grad_norm": 1.8174163103103638, "learning_rate": 6.465e-06, "loss": 0.1102, "step": 2158 }, { "epoch": 3.984302862419206, "grad_norm": 2.3735907077789307, "learning_rate": 6.468000000000001e-06, "loss": 0.1019, "step": 2159 }, { "epoch": 3.9861495844875345, "grad_norm": 1.7283834218978882, "learning_rate": 6.471e-06, "loss": 0.0737, "step": 2160 }, { "epoch": 3.9879963065558632, "grad_norm": 5.035957336425781, "learning_rate": 6.474e-06, "loss": 0.105, "step": 2161 }, { "epoch": 3.989843028624192, "grad_norm": 1.3808785676956177, "learning_rate": 6.477000000000001e-06, "loss": 0.0946, "step": 2162 }, { "epoch": 3.9916897506925206, "grad_norm": 1.2071834802627563, "learning_rate": 6.48e-06, "loss": 0.1027, "step": 2163 }, { "epoch": 3.9935364727608493, "grad_norm": 1.04526948928833, "learning_rate": 
6.483e-06, "loss": 0.0779, "step": 2164 }, { "epoch": 3.995383194829178, "grad_norm": 1.663916826248169, "learning_rate": 6.486e-06, "loss": 0.1289, "step": 2165 }, { "epoch": 3.997229916897507, "grad_norm": 1.2232156991958618, "learning_rate": 6.489e-06, "loss": 0.0969, "step": 2166 }, { "epoch": 3.9990766389658354, "grad_norm": 1.5424480438232422, "learning_rate": 6.492000000000001e-06, "loss": 0.1221, "step": 2167 }, { "epoch": 4.0, "grad_norm": 2.185368299484253, "learning_rate": 6.4950000000000005e-06, "loss": 0.0528, "step": 2168 }, { "epoch": 4.001846722068329, "grad_norm": 1.911673665046692, "learning_rate": 6.4979999999999994e-06, "loss": 0.5474, "step": 2169 }, { "epoch": 4.003693444136657, "grad_norm": 1.5852998495101929, "learning_rate": 6.501e-06, "loss": 0.4573, "step": 2170 }, { "epoch": 4.0055401662049865, "grad_norm": 2.0277857780456543, "learning_rate": 6.504e-06, "loss": 0.3894, "step": 2171 }, { "epoch": 4.007386888273315, "grad_norm": 2.6222214698791504, "learning_rate": 6.5070000000000005e-06, "loss": 0.454, "step": 2172 }, { "epoch": 4.009233610341644, "grad_norm": 1.520296573638916, "learning_rate": 6.51e-06, "loss": 0.3785, "step": 2173 }, { "epoch": 4.011080332409972, "grad_norm": 1.2729988098144531, "learning_rate": 6.513e-06, "loss": 0.3233, "step": 2174 }, { "epoch": 4.012927054478301, "grad_norm": 1.6835660934448242, "learning_rate": 6.516e-06, "loss": 0.2551, "step": 2175 }, { "epoch": 4.01477377654663, "grad_norm": 1.376844048500061, "learning_rate": 6.519e-06, "loss": 0.2445, "step": 2176 }, { "epoch": 4.016620498614959, "grad_norm": 1.2151410579681396, "learning_rate": 6.522e-06, "loss": 0.336, "step": 2177 }, { "epoch": 4.018467220683287, "grad_norm": 1.3106422424316406, "learning_rate": 6.525e-06, "loss": 0.3283, "step": 2178 }, { "epoch": 4.020313942751616, "grad_norm": 1.0718411207199097, "learning_rate": 6.528e-06, "loss": 0.2019, "step": 2179 }, { "epoch": 4.022160664819944, "grad_norm": 1.2590523958206177, "learning_rate": 
6.531000000000001e-06, "loss": 0.1917, "step": 2180 }, { "epoch": 4.0240073868882735, "grad_norm": 1.6002975702285767, "learning_rate": 6.534e-06, "loss": 0.2292, "step": 2181 }, { "epoch": 4.025854108956602, "grad_norm": 0.8854488730430603, "learning_rate": 6.537e-06, "loss": 0.1327, "step": 2182 }, { "epoch": 4.027700831024931, "grad_norm": 1.2897201776504517, "learning_rate": 6.54e-06, "loss": 0.1739, "step": 2183 }, { "epoch": 4.029547553093259, "grad_norm": 1.1138757467269897, "learning_rate": 6.543e-06, "loss": 0.1351, "step": 2184 }, { "epoch": 4.031394275161588, "grad_norm": 1.1339983940124512, "learning_rate": 6.5460000000000005e-06, "loss": 0.1039, "step": 2185 }, { "epoch": 4.033240997229917, "grad_norm": 0.9088255167007446, "learning_rate": 6.549e-06, "loss": 0.1187, "step": 2186 }, { "epoch": 4.035087719298246, "grad_norm": 1.0308983325958252, "learning_rate": 6.552e-06, "loss": 0.08, "step": 2187 }, { "epoch": 4.036934441366574, "grad_norm": 2.115433692932129, "learning_rate": 6.555e-06, "loss": 0.106, "step": 2188 }, { "epoch": 4.038781163434903, "grad_norm": 1.531274437904358, "learning_rate": 6.558e-06, "loss": 0.1008, "step": 2189 }, { "epoch": 4.040627885503231, "grad_norm": 1.2350906133651733, "learning_rate": 6.561e-06, "loss": 0.0928, "step": 2190 }, { "epoch": 4.0424746075715605, "grad_norm": 0.9239581227302551, "learning_rate": 6.564e-06, "loss": 0.0777, "step": 2191 }, { "epoch": 4.044321329639889, "grad_norm": 0.9720399975776672, "learning_rate": 6.567000000000001e-06, "loss": 0.0563, "step": 2192 }, { "epoch": 4.046168051708218, "grad_norm": 0.8621312379837036, "learning_rate": 6.57e-06, "loss": 0.0581, "step": 2193 }, { "epoch": 4.048014773776547, "grad_norm": 3.7505435943603516, "learning_rate": 6.573e-06, "loss": 0.0927, "step": 2194 }, { "epoch": 4.049861495844875, "grad_norm": 1.2696458101272583, "learning_rate": 6.576e-06, "loss": 0.0902, "step": 2195 }, { "epoch": 4.0517082179132045, "grad_norm": 2.1792521476745605, 
"learning_rate": 6.579e-06, "loss": 0.1256, "step": 2196 }, { "epoch": 4.053554939981533, "grad_norm": 1.2086941003799438, "learning_rate": 6.582000000000001e-06, "loss": 0.1057, "step": 2197 }, { "epoch": 4.055401662049862, "grad_norm": 1.3100030422210693, "learning_rate": 6.5850000000000005e-06, "loss": 0.0875, "step": 2198 }, { "epoch": 4.05724838411819, "grad_norm": 1.3629885911941528, "learning_rate": 6.5879999999999994e-06, "loss": 0.1275, "step": 2199 }, { "epoch": 4.059095106186519, "grad_norm": 4.132709980010986, "learning_rate": 6.591e-06, "loss": 0.0776, "step": 2200 }, { "epoch": 4.0609418282548475, "grad_norm": 1.144539475440979, "learning_rate": 6.594e-06, "loss": 0.0939, "step": 2201 }, { "epoch": 4.062788550323177, "grad_norm": 1.3136900663375854, "learning_rate": 6.5970000000000005e-06, "loss": 0.0804, "step": 2202 }, { "epoch": 4.064635272391505, "grad_norm": 1.6249648332595825, "learning_rate": 6.6e-06, "loss": 0.0888, "step": 2203 }, { "epoch": 4.066481994459834, "grad_norm": 1.2383456230163574, "learning_rate": 6.603e-06, "loss": 0.0901, "step": 2204 }, { "epoch": 4.068328716528162, "grad_norm": 1.428479552268982, "learning_rate": 6.606000000000001e-06, "loss": 0.1368, "step": 2205 }, { "epoch": 4.0701754385964914, "grad_norm": 1.5173728466033936, "learning_rate": 6.609e-06, "loss": 0.0862, "step": 2206 }, { "epoch": 4.07202216066482, "grad_norm": 1.3671185970306396, "learning_rate": 6.612e-06, "loss": 0.0681, "step": 2207 }, { "epoch": 4.073868882733149, "grad_norm": 1.0532758235931396, "learning_rate": 6.615e-06, "loss": 0.0832, "step": 2208 }, { "epoch": 4.075715604801477, "grad_norm": 1.199684977531433, "learning_rate": 6.618e-06, "loss": 0.0798, "step": 2209 }, { "epoch": 4.077562326869806, "grad_norm": 2.115050792694092, "learning_rate": 6.621000000000001e-06, "loss": 0.1533, "step": 2210 }, { "epoch": 4.0794090489381345, "grad_norm": 1.1212481260299683, "learning_rate": 6.6240000000000004e-06, "loss": 0.0706, "step": 2211 }, { "epoch": 
4.081255771006464, "grad_norm": 1.539804458618164, "learning_rate": 6.627e-06, "loss": 0.0979, "step": 2212 }, { "epoch": 4.083102493074792, "grad_norm": 1.9622479677200317, "learning_rate": 6.63e-06, "loss": 0.1041, "step": 2213 }, { "epoch": 4.084949215143121, "grad_norm": 1.1550004482269287, "learning_rate": 6.633e-06, "loss": 0.0762, "step": 2214 }, { "epoch": 4.086795937211449, "grad_norm": 1.2409030199050903, "learning_rate": 6.6360000000000005e-06, "loss": 0.1235, "step": 2215 }, { "epoch": 4.088642659279778, "grad_norm": 1.394726276397705, "learning_rate": 6.639e-06, "loss": 0.0847, "step": 2216 }, { "epoch": 4.090489381348107, "grad_norm": 1.7710202932357788, "learning_rate": 6.642000000000001e-06, "loss": 0.1489, "step": 2217 }, { "epoch": 4.092336103416436, "grad_norm": 1.8116905689239502, "learning_rate": 6.645e-06, "loss": 0.1583, "step": 2218 }, { "epoch": 4.094182825484765, "grad_norm": 2.0445168018341064, "learning_rate": 6.648e-06, "loss": 0.4672, "step": 2219 }, { "epoch": 4.096029547553093, "grad_norm": 1.6109222173690796, "learning_rate": 6.651e-06, "loss": 0.3575, "step": 2220 }, { "epoch": 4.097876269621422, "grad_norm": 1.006513237953186, "learning_rate": 6.654e-06, "loss": 0.3371, "step": 2221 }, { "epoch": 4.099722991689751, "grad_norm": 1.294262409210205, "learning_rate": 6.657e-06, "loss": 0.4369, "step": 2222 }, { "epoch": 4.10156971375808, "grad_norm": 2.5763139724731445, "learning_rate": 6.660000000000001e-06, "loss": 0.3756, "step": 2223 }, { "epoch": 4.103416435826408, "grad_norm": 1.9553343057632446, "learning_rate": 6.6629999999999996e-06, "loss": 0.3584, "step": 2224 }, { "epoch": 4.105263157894737, "grad_norm": 1.4016964435577393, "learning_rate": 6.666e-06, "loss": 0.2867, "step": 2225 }, { "epoch": 4.107109879963065, "grad_norm": 0.8154698610305786, "learning_rate": 6.669e-06, "loss": 0.2086, "step": 2226 }, { "epoch": 4.108956602031395, "grad_norm": 1.5167957544326782, "learning_rate": 6.672e-06, "loss": 0.2631, "step": 2227 
}, { "epoch": 4.110803324099723, "grad_norm": 1.1658596992492676, "learning_rate": 6.6750000000000005e-06, "loss": 0.3025, "step": 2228 }, { "epoch": 4.112650046168052, "grad_norm": 1.6568107604980469, "learning_rate": 6.678e-06, "loss": 0.2013, "step": 2229 }, { "epoch": 4.11449676823638, "grad_norm": 1.6511210203170776, "learning_rate": 6.681e-06, "loss": 0.2295, "step": 2230 }, { "epoch": 4.116343490304709, "grad_norm": 0.9506266713142395, "learning_rate": 6.684e-06, "loss": 0.1298, "step": 2231 }, { "epoch": 4.118190212373038, "grad_norm": 1.0544261932373047, "learning_rate": 6.687e-06, "loss": 0.1572, "step": 2232 }, { "epoch": 4.120036934441367, "grad_norm": 1.4018826484680176, "learning_rate": 6.69e-06, "loss": 0.1093, "step": 2233 }, { "epoch": 4.121883656509695, "grad_norm": 0.9542692303657532, "learning_rate": 6.693e-06, "loss": 0.112, "step": 2234 }, { "epoch": 4.123730378578024, "grad_norm": 1.1946085691452026, "learning_rate": 6.696000000000001e-06, "loss": 0.0899, "step": 2235 }, { "epoch": 4.125577100646352, "grad_norm": 0.9538592100143433, "learning_rate": 6.699e-06, "loss": 0.0872, "step": 2236 }, { "epoch": 4.127423822714682, "grad_norm": 1.4511198997497559, "learning_rate": 6.7019999999999995e-06, "loss": 0.0991, "step": 2237 }, { "epoch": 4.12927054478301, "grad_norm": 1.070308804512024, "learning_rate": 6.705e-06, "loss": 0.1096, "step": 2238 }, { "epoch": 4.131117266851339, "grad_norm": 0.942848265171051, "learning_rate": 6.708e-06, "loss": 0.0773, "step": 2239 }, { "epoch": 4.132963988919667, "grad_norm": 1.3079289197921753, "learning_rate": 6.711000000000001e-06, "loss": 0.1019, "step": 2240 }, { "epoch": 4.134810710987996, "grad_norm": 1.2125372886657715, "learning_rate": 6.7140000000000004e-06, "loss": 0.1144, "step": 2241 }, { "epoch": 4.136657433056325, "grad_norm": 1.0549064874649048, "learning_rate": 6.716999999999999e-06, "loss": 0.088, "step": 2242 }, { "epoch": 4.138504155124654, "grad_norm": 1.2122917175292969, "learning_rate": 
6.72e-06, "loss": 0.1125, "step": 2243 }, { "epoch": 4.140350877192983, "grad_norm": 0.9945119619369507, "learning_rate": 6.723e-06, "loss": 0.0933, "step": 2244 }, { "epoch": 4.142197599261311, "grad_norm": 1.1362215280532837, "learning_rate": 6.7260000000000005e-06, "loss": 0.1095, "step": 2245 }, { "epoch": 4.14404432132964, "grad_norm": 1.3205327987670898, "learning_rate": 6.729e-06, "loss": 0.1808, "step": 2246 }, { "epoch": 4.1458910433979685, "grad_norm": 1.3926762342453003, "learning_rate": 6.732e-06, "loss": 0.1172, "step": 2247 }, { "epoch": 4.147737765466298, "grad_norm": 1.0345947742462158, "learning_rate": 6.735000000000001e-06, "loss": 0.0861, "step": 2248 }, { "epoch": 4.149584487534626, "grad_norm": 1.600387692451477, "learning_rate": 6.738e-06, "loss": 0.127, "step": 2249 }, { "epoch": 4.151431209602955, "grad_norm": 1.2428592443466187, "learning_rate": 6.741e-06, "loss": 0.1019, "step": 2250 }, { "epoch": 4.153277931671283, "grad_norm": 1.1123014688491821, "learning_rate": 6.744e-06, "loss": 0.0856, "step": 2251 }, { "epoch": 4.1551246537396125, "grad_norm": 1.348739504814148, "learning_rate": 6.747e-06, "loss": 0.1091, "step": 2252 }, { "epoch": 4.156971375807941, "grad_norm": 1.0651084184646606, "learning_rate": 6.750000000000001e-06, "loss": 0.0778, "step": 2253 }, { "epoch": 4.15881809787627, "grad_norm": 1.460421085357666, "learning_rate": 6.753e-06, "loss": 0.078, "step": 2254 }, { "epoch": 4.160664819944598, "grad_norm": 0.9551540017127991, "learning_rate": 6.756e-06, "loss": 0.0792, "step": 2255 }, { "epoch": 4.162511542012927, "grad_norm": 1.3960237503051758, "learning_rate": 6.759e-06, "loss": 0.068, "step": 2256 }, { "epoch": 4.1643582640812555, "grad_norm": 1.785151481628418, "learning_rate": 6.762e-06, "loss": 0.0848, "step": 2257 }, { "epoch": 4.166204986149585, "grad_norm": 1.0868968963623047, "learning_rate": 6.7650000000000005e-06, "loss": 0.0919, "step": 2258 }, { "epoch": 4.168051708217913, "grad_norm": 1.7152469158172607, 
"learning_rate": 6.768e-06, "loss": 0.1152, "step": 2259 }, { "epoch": 4.169898430286242, "grad_norm": 1.454818606376648, "learning_rate": 6.771000000000001e-06, "loss": 0.1094, "step": 2260 }, { "epoch": 4.17174515235457, "grad_norm": 1.5478395223617554, "learning_rate": 6.774e-06, "loss": 0.1006, "step": 2261 }, { "epoch": 4.1735918744228995, "grad_norm": 1.4219355583190918, "learning_rate": 6.777e-06, "loss": 0.1003, "step": 2262 }, { "epoch": 4.175438596491228, "grad_norm": 1.2621865272521973, "learning_rate": 6.78e-06, "loss": 0.0945, "step": 2263 }, { "epoch": 4.177285318559557, "grad_norm": 1.3872356414794922, "learning_rate": 6.783e-06, "loss": 0.07, "step": 2264 }, { "epoch": 4.179132040627885, "grad_norm": 1.5787618160247803, "learning_rate": 6.786000000000001e-06, "loss": 0.0851, "step": 2265 }, { "epoch": 4.180978762696214, "grad_norm": 1.548042893409729, "learning_rate": 6.7890000000000006e-06, "loss": 0.11, "step": 2266 }, { "epoch": 4.1828254847645425, "grad_norm": 1.557713270187378, "learning_rate": 6.7919999999999995e-06, "loss": 0.097, "step": 2267 }, { "epoch": 4.184672206832872, "grad_norm": 1.7162854671478271, "learning_rate": 6.795e-06, "loss": 0.1412, "step": 2268 }, { "epoch": 4.186518928901201, "grad_norm": 4.086080551147461, "learning_rate": 6.798e-06, "loss": 0.4405, "step": 2269 }, { "epoch": 4.188365650969529, "grad_norm": 3.8445074558258057, "learning_rate": 6.801000000000001e-06, "loss": 0.438, "step": 2270 }, { "epoch": 4.190212373037858, "grad_norm": 2.2936015129089355, "learning_rate": 6.804e-06, "loss": 0.415, "step": 2271 }, { "epoch": 4.1920590951061865, "grad_norm": 1.6141554117202759, "learning_rate": 6.807e-06, "loss": 0.3014, "step": 2272 }, { "epoch": 4.193905817174516, "grad_norm": 2.340399742126465, "learning_rate": 6.81e-06, "loss": 0.347, "step": 2273 }, { "epoch": 4.195752539242844, "grad_norm": 2.091618299484253, "learning_rate": 6.813e-06, "loss": 0.3336, "step": 2274 }, { "epoch": 4.197599261311173, "grad_norm": 
1.398072361946106, "learning_rate": 6.8160000000000005e-06, "loss": 0.2575, "step": 2275 }, { "epoch": 4.199445983379501, "grad_norm": 1.7019211053848267, "learning_rate": 6.819e-06, "loss": 0.358, "step": 2276 }, { "epoch": 4.20129270544783, "grad_norm": 2.1351137161254883, "learning_rate": 6.822e-06, "loss": 0.3326, "step": 2277 }, { "epoch": 4.203139427516159, "grad_norm": 0.9898922443389893, "learning_rate": 6.825000000000001e-06, "loss": 0.2067, "step": 2278 }, { "epoch": 4.204986149584488, "grad_norm": 1.372165560722351, "learning_rate": 6.828e-06, "loss": 0.213, "step": 2279 }, { "epoch": 4.206832871652816, "grad_norm": 1.582594633102417, "learning_rate": 6.831e-06, "loss": 0.2519, "step": 2280 }, { "epoch": 4.208679593721145, "grad_norm": 1.4090359210968018, "learning_rate": 6.834e-06, "loss": 0.2676, "step": 2281 }, { "epoch": 4.2105263157894735, "grad_norm": 1.1971672773361206, "learning_rate": 6.837e-06, "loss": 0.1274, "step": 2282 }, { "epoch": 4.212373037857803, "grad_norm": 1.5907059907913208, "learning_rate": 6.840000000000001e-06, "loss": 0.1727, "step": 2283 }, { "epoch": 4.214219759926131, "grad_norm": 1.0141587257385254, "learning_rate": 6.843e-06, "loss": 0.1094, "step": 2284 }, { "epoch": 4.21606648199446, "grad_norm": 1.7501100301742554, "learning_rate": 6.845999999999999e-06, "loss": 0.1339, "step": 2285 }, { "epoch": 4.217913204062788, "grad_norm": 1.2493884563446045, "learning_rate": 6.849e-06, "loss": 0.1022, "step": 2286 }, { "epoch": 4.219759926131117, "grad_norm": 1.592990517616272, "learning_rate": 6.852e-06, "loss": 0.0944, "step": 2287 }, { "epoch": 4.221606648199446, "grad_norm": 1.0477226972579956, "learning_rate": 6.8550000000000004e-06, "loss": 0.0782, "step": 2288 }, { "epoch": 4.223453370267775, "grad_norm": 1.2984161376953125, "learning_rate": 6.858e-06, "loss": 0.1491, "step": 2289 }, { "epoch": 4.225300092336103, "grad_norm": 1.1166598796844482, "learning_rate": 6.861e-06, "loss": 0.1148, "step": 2290 }, { "epoch": 
4.227146814404432, "grad_norm": 1.163062572479248, "learning_rate": 6.864000000000001e-06, "loss": 0.0927, "step": 2291 }, { "epoch": 4.22899353647276, "grad_norm": 0.9511471390724182, "learning_rate": 6.867e-06, "loss": 0.0874, "step": 2292 }, { "epoch": 4.23084025854109, "grad_norm": 1.0879565477371216, "learning_rate": 6.87e-06, "loss": 0.0665, "step": 2293 }, { "epoch": 4.232686980609419, "grad_norm": 1.0754992961883545, "learning_rate": 6.873e-06, "loss": 0.0981, "step": 2294 }, { "epoch": 4.234533702677747, "grad_norm": 1.0654864311218262, "learning_rate": 6.876e-06, "loss": 0.0671, "step": 2295 }, { "epoch": 4.236380424746076, "grad_norm": 1.311405897140503, "learning_rate": 6.8790000000000005e-06, "loss": 0.084, "step": 2296 }, { "epoch": 4.238227146814404, "grad_norm": 1.3197026252746582, "learning_rate": 6.882e-06, "loss": 0.0855, "step": 2297 }, { "epoch": 4.2400738688827335, "grad_norm": 1.2920937538146973, "learning_rate": 6.885e-06, "loss": 0.1308, "step": 2298 }, { "epoch": 4.241920590951062, "grad_norm": 1.233723759651184, "learning_rate": 6.888e-06, "loss": 0.0839, "step": 2299 }, { "epoch": 4.243767313019391, "grad_norm": 1.194996953010559, "learning_rate": 6.891e-06, "loss": 0.0765, "step": 2300 }, { "epoch": 4.245614035087719, "grad_norm": 1.2588831186294556, "learning_rate": 6.894e-06, "loss": 0.0886, "step": 2301 }, { "epoch": 4.247460757156048, "grad_norm": 0.7167526483535767, "learning_rate": 6.897e-06, "loss": 0.0516, "step": 2302 }, { "epoch": 4.249307479224377, "grad_norm": 0.928087055683136, "learning_rate": 6.900000000000001e-06, "loss": 0.0712, "step": 2303 }, { "epoch": 4.251154201292706, "grad_norm": 1.2723053693771362, "learning_rate": 6.903e-06, "loss": 0.1029, "step": 2304 }, { "epoch": 4.253000923361034, "grad_norm": 1.5679049491882324, "learning_rate": 6.906e-06, "loss": 0.0929, "step": 2305 }, { "epoch": 4.254847645429363, "grad_norm": 1.2212111949920654, "learning_rate": 6.909e-06, "loss": 0.0773, "step": 2306 }, { "epoch": 
4.256694367497691, "grad_norm": 0.8988540768623352, "learning_rate": 6.912e-06, "loss": 0.085, "step": 2307 }, { "epoch": 4.2585410895660205, "grad_norm": 1.235801100730896, "learning_rate": 6.915000000000001e-06, "loss": 0.1045, "step": 2308 }, { "epoch": 4.260387811634349, "grad_norm": 1.007037878036499, "learning_rate": 6.9180000000000005e-06, "loss": 0.0806, "step": 2309 }, { "epoch": 4.262234533702678, "grad_norm": 1.2111518383026123, "learning_rate": 6.9209999999999995e-06, "loss": 0.0723, "step": 2310 }, { "epoch": 4.264081255771006, "grad_norm": 1.328084945678711, "learning_rate": 6.924e-06, "loss": 0.1017, "step": 2311 }, { "epoch": 4.265927977839335, "grad_norm": 1.5174875259399414, "learning_rate": 6.927e-06, "loss": 0.0943, "step": 2312 }, { "epoch": 4.267774699907664, "grad_norm": 1.4504024982452393, "learning_rate": 6.9300000000000006e-06, "loss": 0.0932, "step": 2313 }, { "epoch": 4.269621421975993, "grad_norm": 1.2458757162094116, "learning_rate": 6.933e-06, "loss": 0.1036, "step": 2314 }, { "epoch": 4.271468144044321, "grad_norm": 1.3173937797546387, "learning_rate": 6.936e-06, "loss": 0.1401, "step": 2315 }, { "epoch": 4.27331486611265, "grad_norm": 1.7283494472503662, "learning_rate": 6.939e-06, "loss": 0.1783, "step": 2316 }, { "epoch": 4.275161588180978, "grad_norm": 2.916658639907837, "learning_rate": 6.942e-06, "loss": 0.0841, "step": 2317 }, { "epoch": 4.2770083102493075, "grad_norm": 1.529240608215332, "learning_rate": 6.945e-06, "loss": 0.1205, "step": 2318 }, { "epoch": 4.278855032317637, "grad_norm": 2.7654285430908203, "learning_rate": 6.948e-06, "loss": 0.5173, "step": 2319 }, { "epoch": 4.280701754385965, "grad_norm": 1.5961171388626099, "learning_rate": 6.951e-06, "loss": 0.4608, "step": 2320 }, { "epoch": 4.282548476454294, "grad_norm": 1.9149647951126099, "learning_rate": 6.954000000000001e-06, "loss": 0.3426, "step": 2321 }, { "epoch": 4.284395198522622, "grad_norm": 1.4873987436294556, "learning_rate": 6.957e-06, "loss": 0.3897, 
"step": 2322 }, { "epoch": 4.286241920590951, "grad_norm": 1.1254777908325195, "learning_rate": 6.96e-06, "loss": 0.338, "step": 2323 }, { "epoch": 4.28808864265928, "grad_norm": 1.5503439903259277, "learning_rate": 6.963e-06, "loss": 0.4296, "step": 2324 }, { "epoch": 4.289935364727609, "grad_norm": 1.42909574508667, "learning_rate": 6.966e-06, "loss": 0.2739, "step": 2325 }, { "epoch": 4.291782086795937, "grad_norm": 1.3683276176452637, "learning_rate": 6.9690000000000005e-06, "loss": 0.2861, "step": 2326 }, { "epoch": 4.293628808864266, "grad_norm": 1.140487790107727, "learning_rate": 6.972e-06, "loss": 0.1559, "step": 2327 }, { "epoch": 4.2954755309325945, "grad_norm": 1.4923127889633179, "learning_rate": 6.975e-06, "loss": 0.2232, "step": 2328 }, { "epoch": 4.297322253000924, "grad_norm": 1.2142882347106934, "learning_rate": 6.978e-06, "loss": 0.2329, "step": 2329 }, { "epoch": 4.299168975069252, "grad_norm": 1.9966238737106323, "learning_rate": 6.981e-06, "loss": 0.238, "step": 2330 }, { "epoch": 4.301015697137581, "grad_norm": 2.1601319313049316, "learning_rate": 6.984e-06, "loss": 0.2103, "step": 2331 }, { "epoch": 4.302862419205909, "grad_norm": 1.0669894218444824, "learning_rate": 6.987e-06, "loss": 0.1036, "step": 2332 }, { "epoch": 4.304709141274238, "grad_norm": 1.0218197107315063, "learning_rate": 6.990000000000001e-06, "loss": 0.1374, "step": 2333 }, { "epoch": 4.306555863342567, "grad_norm": 0.9428393244743347, "learning_rate": 6.993000000000001e-06, "loss": 0.1168, "step": 2334 }, { "epoch": 4.308402585410896, "grad_norm": 0.9400322437286377, "learning_rate": 6.996e-06, "loss": 0.0822, "step": 2335 }, { "epoch": 4.310249307479224, "grad_norm": 1.2620066404342651, "learning_rate": 6.999e-06, "loss": 0.1182, "step": 2336 }, { "epoch": 4.312096029547553, "grad_norm": 0.9195238351821899, "learning_rate": 7.002e-06, "loss": 0.0916, "step": 2337 }, { "epoch": 4.3139427516158815, "grad_norm": 1.087558388710022, "learning_rate": 7.005000000000001e-06, 
"loss": 0.1864, "step": 2338 }, { "epoch": 4.315789473684211, "grad_norm": 1.180001974105835, "learning_rate": 7.0080000000000005e-06, "loss": 0.0721, "step": 2339 }, { "epoch": 4.317636195752539, "grad_norm": 1.4501054286956787, "learning_rate": 7.011e-06, "loss": 0.1138, "step": 2340 }, { "epoch": 4.319482917820868, "grad_norm": 0.8645310401916504, "learning_rate": 7.014e-06, "loss": 0.0771, "step": 2341 }, { "epoch": 4.321329639889196, "grad_norm": 0.9253374338150024, "learning_rate": 7.017e-06, "loss": 0.0759, "step": 2342 }, { "epoch": 4.323176361957525, "grad_norm": 0.88505619764328, "learning_rate": 7.0200000000000006e-06, "loss": 0.0791, "step": 2343 }, { "epoch": 4.325023084025855, "grad_norm": 1.6287795305252075, "learning_rate": 7.023e-06, "loss": 0.1053, "step": 2344 }, { "epoch": 4.326869806094183, "grad_norm": 0.9386304616928101, "learning_rate": 7.026e-06, "loss": 0.0667, "step": 2345 }, { "epoch": 4.328716528162512, "grad_norm": 0.9785365462303162, "learning_rate": 7.029000000000001e-06, "loss": 0.0814, "step": 2346 }, { "epoch": 4.33056325023084, "grad_norm": 1.2905381917953491, "learning_rate": 7.032e-06, "loss": 0.0959, "step": 2347 }, { "epoch": 4.332409972299169, "grad_norm": 1.4337656497955322, "learning_rate": 7.0349999999999996e-06, "loss": 0.0682, "step": 2348 }, { "epoch": 4.334256694367498, "grad_norm": 0.8870456218719482, "learning_rate": 7.038e-06, "loss": 0.069, "step": 2349 }, { "epoch": 4.336103416435827, "grad_norm": 1.0684574842453003, "learning_rate": 7.041e-06, "loss": 0.0877, "step": 2350 }, { "epoch": 4.337950138504155, "grad_norm": 1.24235999584198, "learning_rate": 7.044000000000001e-06, "loss": 0.0994, "step": 2351 }, { "epoch": 4.339796860572484, "grad_norm": 1.1478756666183472, "learning_rate": 7.0470000000000005e-06, "loss": 0.0899, "step": 2352 }, { "epoch": 4.341643582640812, "grad_norm": 0.9353871941566467, "learning_rate": 7.049999999999999e-06, "loss": 0.0829, "step": 2353 }, { "epoch": 4.3434903047091415, 
"grad_norm": 1.3551195859909058, "learning_rate": 7.053e-06, "loss": 0.1088, "step": 2354 }, { "epoch": 4.34533702677747, "grad_norm": 1.18695068359375, "learning_rate": 7.056e-06, "loss": 0.1109, "step": 2355 }, { "epoch": 4.347183748845799, "grad_norm": 1.437421202659607, "learning_rate": 7.0590000000000005e-06, "loss": 0.0998, "step": 2356 }, { "epoch": 4.349030470914127, "grad_norm": 0.967350959777832, "learning_rate": 7.062e-06, "loss": 0.0864, "step": 2357 }, { "epoch": 4.350877192982456, "grad_norm": 1.421446442604065, "learning_rate": 7.065e-06, "loss": 0.1081, "step": 2358 }, { "epoch": 4.352723915050785, "grad_norm": 1.1982686519622803, "learning_rate": 7.068e-06, "loss": 0.0771, "step": 2359 }, { "epoch": 4.354570637119114, "grad_norm": 1.0983085632324219, "learning_rate": 7.071e-06, "loss": 0.0861, "step": 2360 }, { "epoch": 4.356417359187442, "grad_norm": 2.7813031673431396, "learning_rate": 7.074e-06, "loss": 0.1023, "step": 2361 }, { "epoch": 4.358264081255771, "grad_norm": 1.3879581689834595, "learning_rate": 7.077e-06, "loss": 0.082, "step": 2362 }, { "epoch": 4.360110803324099, "grad_norm": 1.6462188959121704, "learning_rate": 7.08e-06, "loss": 0.0811, "step": 2363 }, { "epoch": 4.3619575253924285, "grad_norm": 1.314080834388733, "learning_rate": 7.083000000000001e-06, "loss": 0.083, "step": 2364 }, { "epoch": 4.363804247460757, "grad_norm": 1.1373904943466187, "learning_rate": 7.086e-06, "loss": 0.0778, "step": 2365 }, { "epoch": 4.365650969529086, "grad_norm": 1.0565918684005737, "learning_rate": 7.089e-06, "loss": 0.0757, "step": 2366 }, { "epoch": 4.367497691597414, "grad_norm": 1.1691209077835083, "learning_rate": 7.092e-06, "loss": 0.0895, "step": 2367 }, { "epoch": 4.369344413665743, "grad_norm": 1.9542254209518433, "learning_rate": 7.095e-06, "loss": 0.1602, "step": 2368 }, { "epoch": 4.3711911357340725, "grad_norm": 4.830984115600586, "learning_rate": 7.0980000000000005e-06, "loss": 0.419, "step": 2369 }, { "epoch": 4.373037857802401, 
"grad_norm": 2.0716261863708496, "learning_rate": 7.101e-06, "loss": 0.4215, "step": 2370 }, { "epoch": 4.374884579870729, "grad_norm": 1.3052515983581543, "learning_rate": 7.104e-06, "loss": 0.3627, "step": 2371 }, { "epoch": 4.376731301939058, "grad_norm": 1.3134573698043823, "learning_rate": 7.107e-06, "loss": 0.3632, "step": 2372 }, { "epoch": 4.378578024007387, "grad_norm": 2.2939159870147705, "learning_rate": 7.11e-06, "loss": 0.3344, "step": 2373 }, { "epoch": 4.3804247460757155, "grad_norm": 1.4574236869812012, "learning_rate": 7.113e-06, "loss": 0.2472, "step": 2374 }, { "epoch": 4.382271468144045, "grad_norm": 1.3681788444519043, "learning_rate": 7.116e-06, "loss": 0.298, "step": 2375 }, { "epoch": 4.384118190212373, "grad_norm": 1.1303521394729614, "learning_rate": 7.119000000000001e-06, "loss": 0.2606, "step": 2376 }, { "epoch": 4.385964912280702, "grad_norm": 1.774362325668335, "learning_rate": 7.122000000000001e-06, "loss": 0.3622, "step": 2377 }, { "epoch": 4.38781163434903, "grad_norm": 1.030994176864624, "learning_rate": 7.1249999999999995e-06, "loss": 0.2323, "step": 2378 }, { "epoch": 4.3896583564173595, "grad_norm": 1.397836446762085, "learning_rate": 7.128e-06, "loss": 0.2153, "step": 2379 }, { "epoch": 4.391505078485688, "grad_norm": 0.9883468151092529, "learning_rate": 7.131e-06, "loss": 0.1679, "step": 2380 }, { "epoch": 4.393351800554017, "grad_norm": 1.6239488124847412, "learning_rate": 7.134000000000001e-06, "loss": 0.233, "step": 2381 }, { "epoch": 4.395198522622345, "grad_norm": 1.3051291704177856, "learning_rate": 7.1370000000000004e-06, "loss": 0.1932, "step": 2382 }, { "epoch": 4.397045244690674, "grad_norm": 1.332719326019287, "learning_rate": 7.14e-06, "loss": 0.1189, "step": 2383 }, { "epoch": 4.3988919667590025, "grad_norm": 1.0142273902893066, "learning_rate": 7.143e-06, "loss": 0.1665, "step": 2384 }, { "epoch": 4.400738688827332, "grad_norm": 0.8044421076774597, "learning_rate": 7.146e-06, "loss": 0.0814, "step": 2385 }, { 
"epoch": 4.40258541089566, "grad_norm": 1.1707408428192139, "learning_rate": 7.1490000000000005e-06, "loss": 0.1247, "step": 2386 }, { "epoch": 4.404432132963989, "grad_norm": 1.0917186737060547, "learning_rate": 7.152e-06, "loss": 0.0814, "step": 2387 }, { "epoch": 4.406278855032317, "grad_norm": 1.094245433807373, "learning_rate": 7.155e-06, "loss": 0.0785, "step": 2388 }, { "epoch": 4.4081255771006465, "grad_norm": 1.2464826107025146, "learning_rate": 7.158000000000001e-06, "loss": 0.0984, "step": 2389 }, { "epoch": 4.409972299168975, "grad_norm": 1.0168637037277222, "learning_rate": 7.161e-06, "loss": 0.0717, "step": 2390 }, { "epoch": 4.411819021237304, "grad_norm": 1.3358882665634155, "learning_rate": 7.164e-06, "loss": 0.0797, "step": 2391 }, { "epoch": 4.413665743305632, "grad_norm": 1.3539047241210938, "learning_rate": 7.167e-06, "loss": 0.1148, "step": 2392 }, { "epoch": 4.415512465373961, "grad_norm": 1.1813031435012817, "learning_rate": 7.17e-06, "loss": 0.0852, "step": 2393 }, { "epoch": 4.41735918744229, "grad_norm": 1.0633906126022339, "learning_rate": 7.173000000000001e-06, "loss": 0.1041, "step": 2394 }, { "epoch": 4.419205909510619, "grad_norm": 1.2666691541671753, "learning_rate": 7.176e-06, "loss": 0.0974, "step": 2395 }, { "epoch": 4.421052631578947, "grad_norm": 0.8168101906776428, "learning_rate": 7.179e-06, "loss": 0.0798, "step": 2396 }, { "epoch": 4.422899353647276, "grad_norm": 1.5207481384277344, "learning_rate": 7.182e-06, "loss": 0.0763, "step": 2397 }, { "epoch": 4.424746075715605, "grad_norm": 1.416662335395813, "learning_rate": 7.185e-06, "loss": 0.1024, "step": 2398 }, { "epoch": 4.426592797783933, "grad_norm": 1.1562248468399048, "learning_rate": 7.1880000000000005e-06, "loss": 0.0812, "step": 2399 }, { "epoch": 4.428439519852263, "grad_norm": 1.0115388631820679, "learning_rate": 7.191e-06, "loss": 0.0604, "step": 2400 }, { "epoch": 4.430286241920591, "grad_norm": 1.4185620546340942, "learning_rate": 7.194000000000001e-06, "loss": 
0.0885, "step": 2401 }, { "epoch": 4.43213296398892, "grad_norm": 1.5242897272109985, "learning_rate": 7.197e-06, "loss": 0.0708, "step": 2402 }, { "epoch": 4.433979686057248, "grad_norm": 1.4109538793563843, "learning_rate": 7.2e-06, "loss": 0.108, "step": 2403 }, { "epoch": 4.435826408125577, "grad_norm": 0.8287237882614136, "learning_rate": 7.203e-06, "loss": 0.0723, "step": 2404 }, { "epoch": 4.437673130193906, "grad_norm": 0.9260662198066711, "learning_rate": 7.206e-06, "loss": 0.0702, "step": 2405 }, { "epoch": 4.439519852262235, "grad_norm": 1.1125154495239258, "learning_rate": 7.209000000000001e-06, "loss": 0.1022, "step": 2406 }, { "epoch": 4.441366574330563, "grad_norm": 1.1801170110702515, "learning_rate": 7.2120000000000006e-06, "loss": 0.0964, "step": 2407 }, { "epoch": 4.443213296398892, "grad_norm": 1.0909982919692993, "learning_rate": 7.2149999999999995e-06, "loss": 0.0881, "step": 2408 }, { "epoch": 4.44506001846722, "grad_norm": 1.1723145246505737, "learning_rate": 7.218e-06, "loss": 0.0888, "step": 2409 }, { "epoch": 4.44690674053555, "grad_norm": 1.0578376054763794, "learning_rate": 7.221e-06, "loss": 0.0733, "step": 2410 }, { "epoch": 4.448753462603878, "grad_norm": 1.6445304155349731, "learning_rate": 7.224e-06, "loss": 0.083, "step": 2411 }, { "epoch": 4.450600184672207, "grad_norm": 1.5911602973937988, "learning_rate": 7.2270000000000004e-06, "loss": 0.0677, "step": 2412 }, { "epoch": 4.452446906740535, "grad_norm": 1.4749010801315308, "learning_rate": 7.23e-06, "loss": 0.0882, "step": 2413 }, { "epoch": 4.454293628808864, "grad_norm": 1.1367706060409546, "learning_rate": 7.233e-06, "loss": 0.0699, "step": 2414 }, { "epoch": 4.456140350877193, "grad_norm": 1.34004545211792, "learning_rate": 7.236e-06, "loss": 0.0888, "step": 2415 }, { "epoch": 4.457987072945522, "grad_norm": 1.772800087928772, "learning_rate": 7.239e-06, "loss": 0.0837, "step": 2416 }, { "epoch": 4.45983379501385, "grad_norm": 2.071884870529175, "learning_rate": 7.242e-06, 
"loss": 0.1246, "step": 2417 }, { "epoch": 4.461680517082179, "grad_norm": 3.1921818256378174, "learning_rate": 7.245e-06, "loss": 0.1084, "step": 2418 }, { "epoch": 4.463527239150508, "grad_norm": 1.80342435836792, "learning_rate": 7.248000000000001e-06, "loss": 0.4917, "step": 2419 }, { "epoch": 4.465373961218837, "grad_norm": 1.9937363862991333, "learning_rate": 7.2510000000000005e-06, "loss": 0.3945, "step": 2420 }, { "epoch": 4.467220683287165, "grad_norm": 1.4467228651046753, "learning_rate": 7.2539999999999995e-06, "loss": 0.3882, "step": 2421 }, { "epoch": 4.469067405355494, "grad_norm": 1.2707688808441162, "learning_rate": 7.257e-06, "loss": 0.3186, "step": 2422 }, { "epoch": 4.470914127423823, "grad_norm": 1.1893327236175537, "learning_rate": 7.26e-06, "loss": 0.3115, "step": 2423 }, { "epoch": 4.472760849492151, "grad_norm": 1.9630070924758911, "learning_rate": 7.263000000000001e-06, "loss": 0.2778, "step": 2424 }, { "epoch": 4.4746075715604805, "grad_norm": 1.1939640045166016, "learning_rate": 7.266e-06, "loss": 0.2441, "step": 2425 }, { "epoch": 4.476454293628809, "grad_norm": 1.2537411451339722, "learning_rate": 7.269e-06, "loss": 0.2923, "step": 2426 }, { "epoch": 4.478301015697138, "grad_norm": 1.9572612047195435, "learning_rate": 7.272e-06, "loss": 0.2752, "step": 2427 }, { "epoch": 4.480147737765466, "grad_norm": 1.1313151121139526, "learning_rate": 7.275e-06, "loss": 0.2391, "step": 2428 }, { "epoch": 4.481994459833795, "grad_norm": 1.2185724973678589, "learning_rate": 7.2780000000000005e-06, "loss": 0.2191, "step": 2429 }, { "epoch": 4.4838411819021236, "grad_norm": 1.0316160917282104, "learning_rate": 7.281e-06, "loss": 0.1636, "step": 2430 }, { "epoch": 4.485687903970453, "grad_norm": 0.8509050607681274, "learning_rate": 7.284e-06, "loss": 0.1291, "step": 2431 }, { "epoch": 4.487534626038781, "grad_norm": 1.032467246055603, "learning_rate": 7.287000000000001e-06, "loss": 0.0756, "step": 2432 }, { "epoch": 4.48938134810711, "grad_norm": 
1.5208518505096436, "learning_rate": 7.29e-06, "loss": 0.1552, "step": 2433 }, { "epoch": 4.491228070175438, "grad_norm": 1.3500417470932007, "learning_rate": 7.293e-06, "loss": 0.1275, "step": 2434 }, { "epoch": 4.4930747922437675, "grad_norm": 0.8810822367668152, "learning_rate": 7.296e-06, "loss": 0.0998, "step": 2435 }, { "epoch": 4.494921514312096, "grad_norm": 0.7776767611503601, "learning_rate": 7.299e-06, "loss": 0.1161, "step": 2436 }, { "epoch": 4.496768236380425, "grad_norm": 1.5567034482955933, "learning_rate": 7.3020000000000006e-06, "loss": 0.0988, "step": 2437 }, { "epoch": 4.498614958448753, "grad_norm": 1.1697642803192139, "learning_rate": 7.305e-06, "loss": 0.0954, "step": 2438 }, { "epoch": 4.500461680517082, "grad_norm": 0.9913340210914612, "learning_rate": 7.308e-06, "loss": 0.0634, "step": 2439 }, { "epoch": 4.5023084025854105, "grad_norm": 1.37468421459198, "learning_rate": 7.311e-06, "loss": 0.0615, "step": 2440 }, { "epoch": 4.50415512465374, "grad_norm": 1.6015678644180298, "learning_rate": 7.314e-06, "loss": 0.1341, "step": 2441 }, { "epoch": 4.506001846722068, "grad_norm": 1.031510591506958, "learning_rate": 7.317e-06, "loss": 0.0822, "step": 2442 }, { "epoch": 4.507848568790397, "grad_norm": 1.2176783084869385, "learning_rate": 7.32e-06, "loss": 0.0852, "step": 2443 }, { "epoch": 4.509695290858726, "grad_norm": 1.1300382614135742, "learning_rate": 7.323000000000001e-06, "loss": 0.159, "step": 2444 }, { "epoch": 4.5115420129270545, "grad_norm": 1.0426396131515503, "learning_rate": 7.326e-06, "loss": 0.0848, "step": 2445 }, { "epoch": 4.513388734995383, "grad_norm": 1.8430628776550293, "learning_rate": 7.329e-06, "loss": 0.0891, "step": 2446 }, { "epoch": 4.515235457063712, "grad_norm": 1.6290236711502075, "learning_rate": 7.332e-06, "loss": 0.065, "step": 2447 }, { "epoch": 4.517082179132041, "grad_norm": 0.8595914840698242, "learning_rate": 7.335e-06, "loss": 0.0682, "step": 2448 }, { "epoch": 4.518928901200369, "grad_norm": 
0.9311236143112183, "learning_rate": 7.338000000000001e-06, "loss": 0.069, "step": 2449 }, { "epoch": 4.520775623268698, "grad_norm": 0.777802050113678, "learning_rate": 7.3410000000000005e-06, "loss": 0.0656, "step": 2450 }, { "epoch": 4.522622345337027, "grad_norm": 1.0638920068740845, "learning_rate": 7.3439999999999995e-06, "loss": 0.0717, "step": 2451 }, { "epoch": 4.524469067405356, "grad_norm": 1.1997246742248535, "learning_rate": 7.347e-06, "loss": 0.0818, "step": 2452 }, { "epoch": 4.526315789473684, "grad_norm": 1.017382025718689, "learning_rate": 7.35e-06, "loss": 0.0821, "step": 2453 }, { "epoch": 4.528162511542013, "grad_norm": 1.1677801609039307, "learning_rate": 7.353000000000001e-06, "loss": 0.0609, "step": 2454 }, { "epoch": 4.5300092336103415, "grad_norm": 1.519492506980896, "learning_rate": 7.356e-06, "loss": 0.0954, "step": 2455 }, { "epoch": 4.531855955678671, "grad_norm": 1.160491704940796, "learning_rate": 7.359e-06, "loss": 0.1009, "step": 2456 }, { "epoch": 4.533702677746999, "grad_norm": 0.9831103086471558, "learning_rate": 7.362e-06, "loss": 0.0919, "step": 2457 }, { "epoch": 4.535549399815328, "grad_norm": 1.2899953126907349, "learning_rate": 7.365e-06, "loss": 0.0827, "step": 2458 }, { "epoch": 4.537396121883656, "grad_norm": 1.563739538192749, "learning_rate": 7.3680000000000004e-06, "loss": 0.0721, "step": 2459 }, { "epoch": 4.539242843951985, "grad_norm": 1.4064379930496216, "learning_rate": 7.371e-06, "loss": 0.0823, "step": 2460 }, { "epoch": 4.541089566020314, "grad_norm": 1.5630841255187988, "learning_rate": 7.374e-06, "loss": 0.0935, "step": 2461 }, { "epoch": 4.542936288088643, "grad_norm": 2.0699667930603027, "learning_rate": 7.377000000000001e-06, "loss": 0.1116, "step": 2462 }, { "epoch": 4.544783010156971, "grad_norm": 1.2968478202819824, "learning_rate": 7.3800000000000005e-06, "loss": 0.0637, "step": 2463 }, { "epoch": 4.5466297322253, "grad_norm": 1.3118655681610107, "learning_rate": 7.383e-06, "loss": 0.0797, "step": 
2464 }, { "epoch": 4.5484764542936285, "grad_norm": 3.162208318710327, "learning_rate": 7.386e-06, "loss": 0.102, "step": 2465 }, { "epoch": 4.550323176361958, "grad_norm": 2.0861332416534424, "learning_rate": 7.389e-06, "loss": 0.11, "step": 2466 }, { "epoch": 4.552169898430286, "grad_norm": 1.3612501621246338, "learning_rate": 7.3920000000000005e-06, "loss": 0.1025, "step": 2467 }, { "epoch": 4.554016620498615, "grad_norm": 1.6249091625213623, "learning_rate": 7.395e-06, "loss": 0.1311, "step": 2468 }, { "epoch": 4.555863342566944, "grad_norm": 2.665177345275879, "learning_rate": 7.398000000000001e-06, "loss": 0.3888, "step": 2469 }, { "epoch": 4.557710064635272, "grad_norm": 2.436779260635376, "learning_rate": 7.401e-06, "loss": 0.3954, "step": 2470 }, { "epoch": 4.559556786703601, "grad_norm": 1.1689409017562866, "learning_rate": 7.404e-06, "loss": 0.34, "step": 2471 }, { "epoch": 4.56140350877193, "grad_norm": 1.4799636602401733, "learning_rate": 7.407e-06, "loss": 0.3183, "step": 2472 }, { "epoch": 4.563250230840259, "grad_norm": 2.0600669384002686, "learning_rate": 7.41e-06, "loss": 0.2514, "step": 2473 }, { "epoch": 4.565096952908587, "grad_norm": 1.1407296657562256, "learning_rate": 7.413e-06, "loss": 0.2809, "step": 2474 }, { "epoch": 4.566943674976916, "grad_norm": 1.2882170677185059, "learning_rate": 7.416000000000001e-06, "loss": 0.2511, "step": 2475 }, { "epoch": 4.568790397045245, "grad_norm": 1.4791680574417114, "learning_rate": 7.419e-06, "loss": 0.2621, "step": 2476 }, { "epoch": 4.570637119113574, "grad_norm": 1.2269575595855713, "learning_rate": 7.422e-06, "loss": 0.2375, "step": 2477 }, { "epoch": 4.572483841181902, "grad_norm": 1.0275866985321045, "learning_rate": 7.425e-06, "loss": 0.2019, "step": 2478 }, { "epoch": 4.574330563250231, "grad_norm": 2.3359060287475586, "learning_rate": 7.428e-06, "loss": 0.2382, "step": 2479 }, { "epoch": 4.576177285318559, "grad_norm": 0.9369570016860962, "learning_rate": 7.4310000000000005e-06, "loss": 
0.1561, "step": 2480 }, { "epoch": 4.5780240073868885, "grad_norm": 1.2175058126449585, "learning_rate": 7.434e-06, "loss": 0.184, "step": 2481 }, { "epoch": 4.579870729455217, "grad_norm": 2.063988447189331, "learning_rate": 7.437e-06, "loss": 0.2426, "step": 2482 }, { "epoch": 4.581717451523546, "grad_norm": 1.105177640914917, "learning_rate": 7.44e-06, "loss": 0.1359, "step": 2483 }, { "epoch": 4.583564173591874, "grad_norm": 1.8581008911132812, "learning_rate": 7.443e-06, "loss": 0.1228, "step": 2484 }, { "epoch": 4.585410895660203, "grad_norm": 0.8880102634429932, "learning_rate": 7.446e-06, "loss": 0.1086, "step": 2485 }, { "epoch": 4.587257617728532, "grad_norm": 0.9845847487449646, "learning_rate": 7.449e-06, "loss": 0.0594, "step": 2486 }, { "epoch": 4.589104339796861, "grad_norm": 0.7028189897537231, "learning_rate": 7.452000000000001e-06, "loss": 0.0764, "step": 2487 }, { "epoch": 4.590951061865189, "grad_norm": 1.0322048664093018, "learning_rate": 7.455e-06, "loss": 0.0814, "step": 2488 }, { "epoch": 4.592797783933518, "grad_norm": 1.1624642610549927, "learning_rate": 7.4579999999999996e-06, "loss": 0.08, "step": 2489 }, { "epoch": 4.594644506001846, "grad_norm": 0.8388569951057434, "learning_rate": 7.461e-06, "loss": 0.0655, "step": 2490 }, { "epoch": 4.5964912280701755, "grad_norm": 0.7668055295944214, "learning_rate": 7.464e-06, "loss": 0.0857, "step": 2491 }, { "epoch": 4.598337950138504, "grad_norm": 1.1497136354446411, "learning_rate": 7.467000000000001e-06, "loss": 0.07, "step": 2492 }, { "epoch": 4.600184672206833, "grad_norm": 2.340991497039795, "learning_rate": 7.4700000000000005e-06, "loss": 0.0676, "step": 2493 }, { "epoch": 4.602031394275162, "grad_norm": 0.8587968349456787, "learning_rate": 7.4729999999999994e-06, "loss": 0.0703, "step": 2494 }, { "epoch": 4.60387811634349, "grad_norm": 2.3311848640441895, "learning_rate": 7.476e-06, "loss": 0.0863, "step": 2495 }, { "epoch": 4.605724838411819, "grad_norm": 0.7522993683815002, 
"learning_rate": 7.479e-06, "loss": 0.0512, "step": 2496 }, { "epoch": 4.607571560480148, "grad_norm": 0.8542543053627014, "learning_rate": 7.4820000000000005e-06, "loss": 0.0746, "step": 2497 }, { "epoch": 4.609418282548477, "grad_norm": 1.0095270872116089, "learning_rate": 7.485e-06, "loss": 0.0673, "step": 2498 }, { "epoch": 4.611265004616805, "grad_norm": 1.6802910566329956, "learning_rate": 7.488e-06, "loss": 0.1941, "step": 2499 }, { "epoch": 4.613111726685134, "grad_norm": 1.132878065109253, "learning_rate": 7.491e-06, "loss": 0.0946, "step": 2500 }, { "epoch": 4.6149584487534625, "grad_norm": 0.8775937557220459, "learning_rate": 7.494e-06, "loss": 0.0651, "step": 2501 }, { "epoch": 4.616805170821792, "grad_norm": 1.0279693603515625, "learning_rate": 7.497e-06, "loss": 0.0837, "step": 2502 }, { "epoch": 4.61865189289012, "grad_norm": 1.2879340648651123, "learning_rate": 7.5e-06, "loss": 0.0532, "step": 2503 }, { "epoch": 4.620498614958449, "grad_norm": 1.112121820449829, "learning_rate": 7.503e-06, "loss": 0.0604, "step": 2504 }, { "epoch": 4.622345337026777, "grad_norm": 1.0696443319320679, "learning_rate": 7.506e-06, "loss": 0.067, "step": 2505 }, { "epoch": 4.624192059095106, "grad_norm": 0.9257684350013733, "learning_rate": 7.5090000000000004e-06, "loss": 0.0728, "step": 2506 }, { "epoch": 4.626038781163435, "grad_norm": 1.5705357789993286, "learning_rate": 7.512e-06, "loss": 0.0926, "step": 2507 }, { "epoch": 4.627885503231764, "grad_norm": 1.1221293210983276, "learning_rate": 7.515e-06, "loss": 0.0859, "step": 2508 }, { "epoch": 4.629732225300092, "grad_norm": 1.043793797492981, "learning_rate": 7.518e-06, "loss": 0.0649, "step": 2509 }, { "epoch": 4.631578947368421, "grad_norm": 0.9244360327720642, "learning_rate": 7.521e-06, "loss": 0.0654, "step": 2510 }, { "epoch": 4.6334256694367495, "grad_norm": 1.8804733753204346, "learning_rate": 7.524000000000001e-06, "loss": 0.0901, "step": 2511 }, { "epoch": 4.635272391505079, "grad_norm": 
2.1051440238952637, "learning_rate": 7.527000000000001e-06, "loss": 0.0578, "step": 2512 }, { "epoch": 4.637119113573407, "grad_norm": 1.860549807548523, "learning_rate": 7.53e-06, "loss": 0.1183, "step": 2513 }, { "epoch": 4.638965835641736, "grad_norm": 1.026725172996521, "learning_rate": 7.533e-06, "loss": 0.0936, "step": 2514 }, { "epoch": 4.640812557710064, "grad_norm": 1.1471177339553833, "learning_rate": 7.5359999999999995e-06, "loss": 0.0937, "step": 2515 }, { "epoch": 4.642659279778393, "grad_norm": 1.392022967338562, "learning_rate": 7.539000000000001e-06, "loss": 0.1107, "step": 2516 }, { "epoch": 4.644506001846722, "grad_norm": 1.7344036102294922, "learning_rate": 7.542000000000001e-06, "loss": 0.1169, "step": 2517 }, { "epoch": 4.646352723915051, "grad_norm": 1.535583734512329, "learning_rate": 7.545000000000001e-06, "loss": 0.1065, "step": 2518 }, { "epoch": 4.64819944598338, "grad_norm": 1.7330703735351562, "learning_rate": 7.5479999999999996e-06, "loss": 0.4917, "step": 2519 }, { "epoch": 4.650046168051708, "grad_norm": 1.3887149095535278, "learning_rate": 7.550999999999999e-06, "loss": 0.4331, "step": 2520 }, { "epoch": 4.6518928901200365, "grad_norm": 1.4955347776412964, "learning_rate": 7.554000000000001e-06, "loss": 0.3801, "step": 2521 }, { "epoch": 4.653739612188366, "grad_norm": 1.291242241859436, "learning_rate": 7.557000000000001e-06, "loss": 0.2607, "step": 2522 }, { "epoch": 4.655586334256695, "grad_norm": 1.6524022817611694, "learning_rate": 7.5600000000000005e-06, "loss": 0.3722, "step": 2523 }, { "epoch": 4.657433056325023, "grad_norm": 1.3549121618270874, "learning_rate": 7.563e-06, "loss": 0.2796, "step": 2524 }, { "epoch": 4.659279778393352, "grad_norm": 1.261399507522583, "learning_rate": 7.565999999999999e-06, "loss": 0.2414, "step": 2525 }, { "epoch": 4.66112650046168, "grad_norm": 1.4139667749404907, "learning_rate": 7.569000000000001e-06, "loss": 0.263, "step": 2526 }, { "epoch": 4.66297322253001, "grad_norm": 
1.3193572759628296, "learning_rate": 7.5720000000000005e-06, "loss": 0.2835, "step": 2527 }, { "epoch": 4.664819944598338, "grad_norm": 1.1930153369903564, "learning_rate": 7.575e-06, "loss": 0.1887, "step": 2528 }, { "epoch": 4.666666666666667, "grad_norm": 1.1165175437927246, "learning_rate": 7.578e-06, "loss": 0.2039, "step": 2529 }, { "epoch": 4.668513388734995, "grad_norm": 1.2176852226257324, "learning_rate": 7.581e-06, "loss": 0.1993, "step": 2530 }, { "epoch": 4.670360110803324, "grad_norm": 1.6196171045303345, "learning_rate": 7.5840000000000006e-06, "loss": 0.3159, "step": 2531 }, { "epoch": 4.672206832871653, "grad_norm": 1.2227379083633423, "learning_rate": 7.587e-06, "loss": 0.1302, "step": 2532 }, { "epoch": 4.674053554939982, "grad_norm": 1.3840858936309814, "learning_rate": 7.59e-06, "loss": 0.1816, "step": 2533 }, { "epoch": 4.67590027700831, "grad_norm": 1.2817070484161377, "learning_rate": 7.593e-06, "loss": 0.1301, "step": 2534 }, { "epoch": 4.677746999076639, "grad_norm": 1.2893495559692383, "learning_rate": 7.596e-06, "loss": 0.1599, "step": 2535 }, { "epoch": 4.679593721144967, "grad_norm": 0.8995867967605591, "learning_rate": 7.599000000000001e-06, "loss": 0.0656, "step": 2536 }, { "epoch": 4.6814404432132966, "grad_norm": 1.0696057081222534, "learning_rate": 7.602e-06, "loss": 0.0714, "step": 2537 }, { "epoch": 4.683287165281625, "grad_norm": 0.9868329167366028, "learning_rate": 7.605e-06, "loss": 0.0726, "step": 2538 }, { "epoch": 4.685133887349954, "grad_norm": 1.6628823280334473, "learning_rate": 7.608e-06, "loss": 0.071, "step": 2539 }, { "epoch": 4.686980609418282, "grad_norm": 0.901323676109314, "learning_rate": 7.611e-06, "loss": 0.0726, "step": 2540 }, { "epoch": 4.688827331486611, "grad_norm": 0.9900251626968384, "learning_rate": 7.614000000000001e-06, "loss": 0.0818, "step": 2541 }, { "epoch": 4.69067405355494, "grad_norm": 0.9476147890090942, "learning_rate": 7.617000000000001e-06, "loss": 0.0863, "step": 2542 }, { "epoch": 
4.692520775623269, "grad_norm": 1.3561960458755493, "learning_rate": 7.62e-06, "loss": 0.1161, "step": 2543 }, { "epoch": 4.694367497691598, "grad_norm": 0.802712619304657, "learning_rate": 7.623e-06, "loss": 0.0772, "step": 2544 }, { "epoch": 4.696214219759926, "grad_norm": 0.976687490940094, "learning_rate": 7.6259999999999995e-06, "loss": 0.0709, "step": 2545 }, { "epoch": 4.698060941828254, "grad_norm": 1.0002014636993408, "learning_rate": 7.629000000000001e-06, "loss": 0.0803, "step": 2546 }, { "epoch": 4.6999076638965835, "grad_norm": 1.0644595623016357, "learning_rate": 7.632e-06, "loss": 0.068, "step": 2547 }, { "epoch": 4.701754385964913, "grad_norm": 0.7027485370635986, "learning_rate": 7.635e-06, "loss": 0.0655, "step": 2548 }, { "epoch": 4.703601108033241, "grad_norm": 1.1962902545928955, "learning_rate": 7.638e-06, "loss": 0.0897, "step": 2549 }, { "epoch": 4.70544783010157, "grad_norm": 1.5846956968307495, "learning_rate": 7.641e-06, "loss": 0.1526, "step": 2550 }, { "epoch": 4.707294552169898, "grad_norm": 1.1864464282989502, "learning_rate": 7.644000000000002e-06, "loss": 0.086, "step": 2551 }, { "epoch": 4.7091412742382275, "grad_norm": 0.921712338924408, "learning_rate": 7.647000000000001e-06, "loss": 0.079, "step": 2552 }, { "epoch": 4.710987996306556, "grad_norm": 1.6384105682373047, "learning_rate": 7.65e-06, "loss": 0.0882, "step": 2553 }, { "epoch": 4.712834718374885, "grad_norm": 1.165614366531372, "learning_rate": 7.653e-06, "loss": 0.0982, "step": 2554 }, { "epoch": 4.714681440443213, "grad_norm": 0.9998936653137207, "learning_rate": 7.656e-06, "loss": 0.053, "step": 2555 }, { "epoch": 4.716528162511542, "grad_norm": 1.1780154705047607, "learning_rate": 7.659e-06, "loss": 0.0766, "step": 2556 }, { "epoch": 4.7183748845798705, "grad_norm": 0.8436416983604431, "learning_rate": 7.662e-06, "loss": 0.0739, "step": 2557 }, { "epoch": 4.7202216066482, "grad_norm": 0.9713981747627258, "learning_rate": 7.665e-06, "loss": 0.0897, "step": 2558 }, { 
"epoch": 4.722068328716528, "grad_norm": 0.9224056005477905, "learning_rate": 7.668e-06, "loss": 0.076, "step": 2559 }, { "epoch": 4.723915050784857, "grad_norm": 0.9956642389297485, "learning_rate": 7.671e-06, "loss": 0.0644, "step": 2560 }, { "epoch": 4.725761772853185, "grad_norm": 1.1300500631332397, "learning_rate": 7.674000000000001e-06, "loss": 0.0849, "step": 2561 }, { "epoch": 4.7276084949215145, "grad_norm": 1.3759992122650146, "learning_rate": 7.677000000000001e-06, "loss": 0.1103, "step": 2562 }, { "epoch": 4.729455216989843, "grad_norm": 1.100203514099121, "learning_rate": 7.680000000000001e-06, "loss": 0.0744, "step": 2563 }, { "epoch": 4.731301939058172, "grad_norm": 0.9975654482841492, "learning_rate": 7.683e-06, "loss": 0.0706, "step": 2564 }, { "epoch": 4.7331486611265, "grad_norm": 1.2560099363327026, "learning_rate": 7.685999999999999e-06, "loss": 0.0995, "step": 2565 }, { "epoch": 4.734995383194829, "grad_norm": 1.2191094160079956, "learning_rate": 7.688999999999999e-06, "loss": 0.0927, "step": 2566 }, { "epoch": 4.7368421052631575, "grad_norm": 1.7450473308563232, "learning_rate": 7.692e-06, "loss": 0.1354, "step": 2567 }, { "epoch": 4.738688827331487, "grad_norm": 1.9262582063674927, "learning_rate": 7.695e-06, "loss": 0.1264, "step": 2568 }, { "epoch": 4.740535549399816, "grad_norm": 2.491466999053955, "learning_rate": 7.698e-06, "loss": 0.4965, "step": 2569 }, { "epoch": 4.742382271468144, "grad_norm": 1.3091697692871094, "learning_rate": 7.701e-06, "loss": 0.3776, "step": 2570 }, { "epoch": 4.744228993536472, "grad_norm": 1.6606218814849854, "learning_rate": 7.704e-06, "loss": 0.3546, "step": 2571 }, { "epoch": 4.7460757156048015, "grad_norm": 1.1433162689208984, "learning_rate": 7.707000000000001e-06, "loss": 0.3365, "step": 2572 }, { "epoch": 4.747922437673131, "grad_norm": 1.1602765321731567, "learning_rate": 7.71e-06, "loss": 0.2869, "step": 2573 }, { "epoch": 4.749769159741459, "grad_norm": 1.4442332983016968, "learning_rate": 
7.713e-06, "loss": 0.3196, "step": 2574 }, { "epoch": 4.751615881809788, "grad_norm": 1.1443811655044556, "learning_rate": 7.716e-06, "loss": 0.2504, "step": 2575 }, { "epoch": 4.753462603878116, "grad_norm": 1.018218994140625, "learning_rate": 7.719e-06, "loss": 0.2439, "step": 2576 }, { "epoch": 4.755309325946445, "grad_norm": 1.1695661544799805, "learning_rate": 7.722e-06, "loss": 0.2409, "step": 2577 }, { "epoch": 4.757156048014774, "grad_norm": 1.1299222707748413, "learning_rate": 7.725e-06, "loss": 0.1832, "step": 2578 }, { "epoch": 4.759002770083103, "grad_norm": 1.2177181243896484, "learning_rate": 7.728e-06, "loss": 0.2338, "step": 2579 }, { "epoch": 4.760849492151431, "grad_norm": 1.2508996725082397, "learning_rate": 7.731e-06, "loss": 0.1498, "step": 2580 }, { "epoch": 4.76269621421976, "grad_norm": 1.3955219984054565, "learning_rate": 7.733999999999999e-06, "loss": 0.1899, "step": 2581 }, { "epoch": 4.7645429362880884, "grad_norm": 1.0323314666748047, "learning_rate": 7.737e-06, "loss": 0.1173, "step": 2582 }, { "epoch": 4.766389658356418, "grad_norm": 0.8515747785568237, "learning_rate": 7.74e-06, "loss": 0.1056, "step": 2583 }, { "epoch": 4.768236380424746, "grad_norm": 0.7767705321311951, "learning_rate": 7.743e-06, "loss": 0.0839, "step": 2584 }, { "epoch": 4.770083102493075, "grad_norm": 0.8640003800392151, "learning_rate": 7.746e-06, "loss": 0.0661, "step": 2585 }, { "epoch": 4.771929824561403, "grad_norm": 0.7137645483016968, "learning_rate": 7.749e-06, "loss": 0.0633, "step": 2586 }, { "epoch": 4.773776546629732, "grad_norm": 0.9822434186935425, "learning_rate": 7.752000000000001e-06, "loss": 0.0957, "step": 2587 }, { "epoch": 4.775623268698061, "grad_norm": 0.9581803679466248, "learning_rate": 7.755000000000001e-06, "loss": 0.1328, "step": 2588 }, { "epoch": 4.77746999076639, "grad_norm": 1.1790051460266113, "learning_rate": 7.758000000000001e-06, "loss": 0.0914, "step": 2589 }, { "epoch": 4.779316712834718, "grad_norm": 0.988778829574585, 
"learning_rate": 7.760999999999999e-06, "loss": 0.0594, "step": 2590 }, { "epoch": 4.781163434903047, "grad_norm": 0.9305976629257202, "learning_rate": 7.763999999999999e-06, "loss": 0.0762, "step": 2591 }, { "epoch": 4.783010156971375, "grad_norm": 0.9188739061355591, "learning_rate": 7.767e-06, "loss": 0.0626, "step": 2592 }, { "epoch": 4.784856879039705, "grad_norm": 0.8577688336372375, "learning_rate": 7.77e-06, "loss": 0.0565, "step": 2593 }, { "epoch": 4.786703601108034, "grad_norm": 0.9031362533569336, "learning_rate": 7.773e-06, "loss": 0.0855, "step": 2594 }, { "epoch": 4.788550323176362, "grad_norm": 0.8325183987617493, "learning_rate": 7.776e-06, "loss": 0.0761, "step": 2595 }, { "epoch": 4.79039704524469, "grad_norm": 1.1880937814712524, "learning_rate": 7.779e-06, "loss": 0.0664, "step": 2596 }, { "epoch": 4.792243767313019, "grad_norm": 1.0195281505584717, "learning_rate": 7.782000000000001e-06, "loss": 0.0855, "step": 2597 }, { "epoch": 4.7940904893813485, "grad_norm": 0.9046306014060974, "learning_rate": 7.785000000000001e-06, "loss": 0.0784, "step": 2598 }, { "epoch": 4.795937211449677, "grad_norm": 0.8892627954483032, "learning_rate": 7.788e-06, "loss": 0.0648, "step": 2599 }, { "epoch": 4.797783933518006, "grad_norm": 1.1928547620773315, "learning_rate": 7.791e-06, "loss": 0.061, "step": 2600 }, { "epoch": 4.799630655586334, "grad_norm": 0.8939501047134399, "learning_rate": 7.794e-06, "loss": 0.0674, "step": 2601 }, { "epoch": 4.801477377654663, "grad_norm": 1.0067095756530762, "learning_rate": 7.797e-06, "loss": 0.0752, "step": 2602 }, { "epoch": 4.803324099722992, "grad_norm": 0.9221670627593994, "learning_rate": 7.8e-06, "loss": 0.0568, "step": 2603 }, { "epoch": 4.805170821791321, "grad_norm": 0.8903829455375671, "learning_rate": 7.803e-06, "loss": 0.0748, "step": 2604 }, { "epoch": 4.807017543859649, "grad_norm": 1.2078776359558105, "learning_rate": 7.806e-06, "loss": 0.0936, "step": 2605 }, { "epoch": 4.808864265927978, "grad_norm": 
1.2025963068008423, "learning_rate": 7.809e-06, "loss": 0.0626, "step": 2606 }, { "epoch": 4.810710987996306, "grad_norm": 2.6398403644561768, "learning_rate": 7.812e-06, "loss": 0.0786, "step": 2607 }, { "epoch": 4.8125577100646355, "grad_norm": 1.0301458835601807, "learning_rate": 7.815e-06, "loss": 0.0955, "step": 2608 }, { "epoch": 4.814404432132964, "grad_norm": 1.054529070854187, "learning_rate": 7.818e-06, "loss": 0.0897, "step": 2609 }, { "epoch": 4.816251154201293, "grad_norm": 0.6876257658004761, "learning_rate": 7.821e-06, "loss": 0.0672, "step": 2610 }, { "epoch": 4.818097876269621, "grad_norm": 1.0190913677215576, "learning_rate": 7.824e-06, "loss": 0.0908, "step": 2611 }, { "epoch": 4.81994459833795, "grad_norm": 1.0644663572311401, "learning_rate": 7.827000000000001e-06, "loss": 0.0886, "step": 2612 }, { "epoch": 4.821791320406279, "grad_norm": 1.4151818752288818, "learning_rate": 7.830000000000001e-06, "loss": 0.0797, "step": 2613 }, { "epoch": 4.823638042474608, "grad_norm": 1.2000187635421753, "learning_rate": 7.833e-06, "loss": 0.079, "step": 2614 }, { "epoch": 4.825484764542936, "grad_norm": 0.8478805422782898, "learning_rate": 7.836e-06, "loss": 0.0714, "step": 2615 }, { "epoch": 4.827331486611265, "grad_norm": 0.9508643746376038, "learning_rate": 7.838999999999999e-06, "loss": 0.0558, "step": 2616 }, { "epoch": 4.829178208679593, "grad_norm": 1.2372348308563232, "learning_rate": 7.842e-06, "loss": 0.1049, "step": 2617 }, { "epoch": 4.8310249307479225, "grad_norm": 1.6176644563674927, "learning_rate": 7.845e-06, "loss": 0.1252, "step": 2618 }, { "epoch": 4.832871652816252, "grad_norm": 1.7493839263916016, "learning_rate": 7.848e-06, "loss": 0.5302, "step": 2619 }, { "epoch": 4.83471837488458, "grad_norm": 1.7353014945983887, "learning_rate": 7.851e-06, "loss": 0.4991, "step": 2620 }, { "epoch": 4.836565096952908, "grad_norm": 1.6778644323349, "learning_rate": 7.854e-06, "loss": 0.2939, "step": 2621 }, { "epoch": 4.838411819021237, "grad_norm": 
1.1693828105926514, "learning_rate": 7.857000000000001e-06, "loss": 0.3351, "step": 2622 }, { "epoch": 4.840258541089566, "grad_norm": 1.0527209043502808, "learning_rate": 7.860000000000001e-06, "loss": 0.3107, "step": 2623 }, { "epoch": 4.842105263157895, "grad_norm": 1.8866925239562988, "learning_rate": 7.863e-06, "loss": 0.3261, "step": 2624 }, { "epoch": 4.843951985226224, "grad_norm": 1.1227540969848633, "learning_rate": 7.866e-06, "loss": 0.2156, "step": 2625 }, { "epoch": 4.845798707294552, "grad_norm": 1.33430016040802, "learning_rate": 7.868999999999999e-06, "loss": 0.2171, "step": 2626 }, { "epoch": 4.847645429362881, "grad_norm": 1.201172113418579, "learning_rate": 7.872e-06, "loss": 0.2086, "step": 2627 }, { "epoch": 4.8494921514312095, "grad_norm": 1.0619585514068604, "learning_rate": 7.875e-06, "loss": 0.1949, "step": 2628 }, { "epoch": 4.851338873499539, "grad_norm": 1.5394426584243774, "learning_rate": 7.878e-06, "loss": 0.1855, "step": 2629 }, { "epoch": 4.853185595567867, "grad_norm": 1.0706628561019897, "learning_rate": 7.881e-06, "loss": 0.1521, "step": 2630 }, { "epoch": 4.855032317636196, "grad_norm": 2.8826377391815186, "learning_rate": 7.884e-06, "loss": 0.1697, "step": 2631 }, { "epoch": 4.856879039704524, "grad_norm": 1.1852144002914429, "learning_rate": 7.887000000000001e-06, "loss": 0.1184, "step": 2632 }, { "epoch": 4.858725761772853, "grad_norm": 1.0162383317947388, "learning_rate": 7.89e-06, "loss": 0.1214, "step": 2633 }, { "epoch": 4.860572483841182, "grad_norm": 1.0937857627868652, "learning_rate": 7.893e-06, "loss": 0.095, "step": 2634 }, { "epoch": 4.862419205909511, "grad_norm": 0.9450917840003967, "learning_rate": 7.896e-06, "loss": 0.1052, "step": 2635 }, { "epoch": 4.864265927977839, "grad_norm": 2.6459155082702637, "learning_rate": 7.899e-06, "loss": 0.1139, "step": 2636 }, { "epoch": 4.866112650046168, "grad_norm": 1.0539472103118896, "learning_rate": 7.902000000000002e-06, "loss": 0.1326, "step": 2637 }, { "epoch": 
4.8679593721144965, "grad_norm": 0.7081450819969177, "learning_rate": 7.905000000000001e-06, "loss": 0.0584, "step": 2638 }, { "epoch": 4.869806094182826, "grad_norm": 1.5462771654129028, "learning_rate": 7.908e-06, "loss": 0.0813, "step": 2639 }, { "epoch": 4.871652816251154, "grad_norm": 1.0687777996063232, "learning_rate": 7.911e-06, "loss": 0.0553, "step": 2640 }, { "epoch": 4.873499538319483, "grad_norm": 0.6855216026306152, "learning_rate": 7.913999999999999e-06, "loss": 0.0657, "step": 2641 }, { "epoch": 4.875346260387811, "grad_norm": 0.9286635518074036, "learning_rate": 7.917e-06, "loss": 0.0749, "step": 2642 }, { "epoch": 4.87719298245614, "grad_norm": 1.0680339336395264, "learning_rate": 7.92e-06, "loss": 0.0824, "step": 2643 }, { "epoch": 4.87903970452447, "grad_norm": 1.5928995609283447, "learning_rate": 7.923e-06, "loss": 0.0837, "step": 2644 }, { "epoch": 4.880886426592798, "grad_norm": 0.8490452170372009, "learning_rate": 7.926e-06, "loss": 0.0656, "step": 2645 }, { "epoch": 4.882733148661126, "grad_norm": 1.2324718236923218, "learning_rate": 7.929e-06, "loss": 0.0564, "step": 2646 }, { "epoch": 4.884579870729455, "grad_norm": 1.1055073738098145, "learning_rate": 7.932000000000001e-06, "loss": 0.0643, "step": 2647 }, { "epoch": 4.886426592797784, "grad_norm": 0.7276487350463867, "learning_rate": 7.935000000000001e-06, "loss": 0.0695, "step": 2648 }, { "epoch": 4.888273314866113, "grad_norm": 1.3281259536743164, "learning_rate": 7.938000000000001e-06, "loss": 0.0939, "step": 2649 }, { "epoch": 4.890120036934442, "grad_norm": 0.9045303463935852, "learning_rate": 7.941e-06, "loss": 0.0646, "step": 2650 }, { "epoch": 4.89196675900277, "grad_norm": 0.9664737582206726, "learning_rate": 7.943999999999999e-06, "loss": 0.0664, "step": 2651 }, { "epoch": 4.893813481071099, "grad_norm": 0.933375895023346, "learning_rate": 7.947e-06, "loss": 0.0645, "step": 2652 }, { "epoch": 4.895660203139427, "grad_norm": 1.1591026782989502, "learning_rate": 7.95e-06, "loss": 
0.077, "step": 2653 }, { "epoch": 4.8975069252077565, "grad_norm": 1.2264528274536133, "learning_rate": 7.953e-06, "loss": 0.1174, "step": 2654 }, { "epoch": 4.899353647276085, "grad_norm": 1.124033808708191, "learning_rate": 7.956e-06, "loss": 0.0751, "step": 2655 }, { "epoch": 4.901200369344414, "grad_norm": 0.8462250232696533, "learning_rate": 7.959e-06, "loss": 0.0722, "step": 2656 }, { "epoch": 4.903047091412742, "grad_norm": 1.0723295211791992, "learning_rate": 7.962000000000001e-06, "loss": 0.0808, "step": 2657 }, { "epoch": 4.904893813481071, "grad_norm": 1.098881483078003, "learning_rate": 7.965e-06, "loss": 0.0609, "step": 2658 }, { "epoch": 4.9067405355494, "grad_norm": 0.9864880442619324, "learning_rate": 7.968e-06, "loss": 0.0778, "step": 2659 }, { "epoch": 4.908587257617729, "grad_norm": 0.7928457856178284, "learning_rate": 7.971e-06, "loss": 0.0657, "step": 2660 }, { "epoch": 4.910433979686057, "grad_norm": 1.0289839506149292, "learning_rate": 7.974e-06, "loss": 0.0713, "step": 2661 }, { "epoch": 4.912280701754386, "grad_norm": 1.0452907085418701, "learning_rate": 7.977000000000002e-06, "loss": 0.0615, "step": 2662 }, { "epoch": 4.914127423822714, "grad_norm": 0.8897021412849426, "learning_rate": 7.98e-06, "loss": 0.0674, "step": 2663 }, { "epoch": 4.9159741458910435, "grad_norm": 1.2523964643478394, "learning_rate": 7.983e-06, "loss": 0.0769, "step": 2664 }, { "epoch": 4.917820867959372, "grad_norm": 1.1305283308029175, "learning_rate": 7.986e-06, "loss": 0.0674, "step": 2665 }, { "epoch": 4.919667590027701, "grad_norm": 1.435927391052246, "learning_rate": 7.989e-06, "loss": 0.0695, "step": 2666 }, { "epoch": 4.921514312096029, "grad_norm": 1.2424691915512085, "learning_rate": 7.992e-06, "loss": 0.0838, "step": 2667 }, { "epoch": 4.923361034164358, "grad_norm": 1.2204331159591675, "learning_rate": 7.995e-06, "loss": 0.0693, "step": 2668 }, { "epoch": 4.9252077562326875, "grad_norm": 2.060532331466675, "learning_rate": 7.998e-06, "loss": 0.4751, 
"step": 2669 }, { "epoch": 4.927054478301016, "grad_norm": 1.386932611465454, "learning_rate": 8.001e-06, "loss": 0.3697, "step": 2670 }, { "epoch": 4.928901200369344, "grad_norm": 1.1898244619369507, "learning_rate": 8.004e-06, "loss": 0.3438, "step": 2671 }, { "epoch": 4.930747922437673, "grad_norm": 10.301701545715332, "learning_rate": 8.007000000000001e-06, "loss": 0.3882, "step": 2672 }, { "epoch": 4.932594644506002, "grad_norm": 1.6150550842285156, "learning_rate": 8.010000000000001e-06, "loss": 0.2899, "step": 2673 }, { "epoch": 4.9344413665743305, "grad_norm": 1.1645244359970093, "learning_rate": 8.013000000000001e-06, "loss": 0.2327, "step": 2674 }, { "epoch": 4.93628808864266, "grad_norm": 1.136450171470642, "learning_rate": 8.016e-06, "loss": 0.2859, "step": 2675 }, { "epoch": 4.938134810710988, "grad_norm": 1.1616744995117188, "learning_rate": 8.018999999999999e-06, "loss": 0.2183, "step": 2676 }, { "epoch": 4.939981532779317, "grad_norm": 1.301045536994934, "learning_rate": 8.022e-06, "loss": 0.2169, "step": 2677 }, { "epoch": 4.941828254847645, "grad_norm": 0.9416823983192444, "learning_rate": 8.025e-06, "loss": 0.1446, "step": 2678 }, { "epoch": 4.9436749769159745, "grad_norm": 1.5150599479675293, "learning_rate": 8.028e-06, "loss": 0.1712, "step": 2679 }, { "epoch": 4.945521698984303, "grad_norm": 0.7367508411407471, "learning_rate": 8.031e-06, "loss": 0.096, "step": 2680 }, { "epoch": 4.947368421052632, "grad_norm": 0.8093917965888977, "learning_rate": 8.034e-06, "loss": 0.0702, "step": 2681 }, { "epoch": 4.94921514312096, "grad_norm": 0.9393030405044556, "learning_rate": 8.037000000000001e-06, "loss": 0.0895, "step": 2682 }, { "epoch": 4.951061865189289, "grad_norm": 0.849078893661499, "learning_rate": 8.040000000000001e-06, "loss": 0.0874, "step": 2683 }, { "epoch": 4.9529085872576175, "grad_norm": 1.019606590270996, "learning_rate": 8.043e-06, "loss": 0.0846, "step": 2684 }, { "epoch": 4.954755309325947, "grad_norm": 0.8608704805374146, 
"learning_rate": 8.046e-06, "loss": 0.0578, "step": 2685 }, { "epoch": 4.956602031394275, "grad_norm": 1.1765228509902954, "learning_rate": 8.049e-06, "loss": 0.1218, "step": 2686 }, { "epoch": 4.958448753462604, "grad_norm": 0.8517112135887146, "learning_rate": 8.052000000000002e-06, "loss": 0.0685, "step": 2687 }, { "epoch": 4.960295475530932, "grad_norm": 0.9209573864936829, "learning_rate": 8.055e-06, "loss": 0.0558, "step": 2688 }, { "epoch": 4.9621421975992615, "grad_norm": 0.9230965971946716, "learning_rate": 8.058e-06, "loss": 0.0647, "step": 2689 }, { "epoch": 4.96398891966759, "grad_norm": 1.2790827751159668, "learning_rate": 8.061e-06, "loss": 0.1008, "step": 2690 }, { "epoch": 4.965835641735919, "grad_norm": 1.0466139316558838, "learning_rate": 8.064e-06, "loss": 0.0827, "step": 2691 }, { "epoch": 4.967682363804247, "grad_norm": 1.233939528465271, "learning_rate": 8.067e-06, "loss": 0.0685, "step": 2692 }, { "epoch": 4.969529085872576, "grad_norm": 1.100265622138977, "learning_rate": 8.07e-06, "loss": 0.055, "step": 2693 }, { "epoch": 4.971375807940905, "grad_norm": 1.711647868156433, "learning_rate": 8.073e-06, "loss": 0.063, "step": 2694 }, { "epoch": 4.973222530009234, "grad_norm": 0.9649916887283325, "learning_rate": 8.076e-06, "loss": 0.066, "step": 2695 }, { "epoch": 4.975069252077562, "grad_norm": 1.0252290964126587, "learning_rate": 8.079e-06, "loss": 0.064, "step": 2696 }, { "epoch": 4.976915974145891, "grad_norm": 0.9084988236427307, "learning_rate": 8.082e-06, "loss": 0.0637, "step": 2697 }, { "epoch": 4.97876269621422, "grad_norm": 0.6036769151687622, "learning_rate": 8.085000000000001e-06, "loss": 0.0437, "step": 2698 }, { "epoch": 4.980609418282548, "grad_norm": 1.1912896633148193, "learning_rate": 8.088000000000001e-06, "loss": 0.1391, "step": 2699 }, { "epoch": 4.982456140350877, "grad_norm": 1.3569239377975464, "learning_rate": 8.091e-06, "loss": 0.0746, "step": 2700 }, { "epoch": 4.984302862419206, "grad_norm": 1.2725087404251099, 
"learning_rate": 8.093999999999999e-06, "loss": 0.0636, "step": 2701 }, { "epoch": 4.986149584487535, "grad_norm": 2.4054908752441406, "learning_rate": 8.096999999999999e-06, "loss": 0.0793, "step": 2702 }, { "epoch": 4.987996306555863, "grad_norm": 1.1511203050613403, "learning_rate": 8.1e-06, "loss": 0.0836, "step": 2703 }, { "epoch": 4.989843028624192, "grad_norm": 1.0939910411834717, "learning_rate": 8.103e-06, "loss": 0.0942, "step": 2704 }, { "epoch": 4.991689750692521, "grad_norm": 1.0632432699203491, "learning_rate": 8.106e-06, "loss": 0.085, "step": 2705 }, { "epoch": 4.99353647276085, "grad_norm": 1.101324439048767, "learning_rate": 8.109e-06, "loss": 0.0752, "step": 2706 }, { "epoch": 4.995383194829178, "grad_norm": 1.3632900714874268, "learning_rate": 8.112e-06, "loss": 0.0974, "step": 2707 }, { "epoch": 4.997229916897507, "grad_norm": 1.1220369338989258, "learning_rate": 8.115000000000001e-06, "loss": 0.0876, "step": 2708 }, { "epoch": 4.999076638965835, "grad_norm": 1.160844087600708, "learning_rate": 8.118000000000001e-06, "loss": 0.0957, "step": 2709 }, { "epoch": 5.0, "grad_norm": 0.7586629986763, "learning_rate": 8.121e-06, "loss": 0.0444, "step": 2710 }, { "epoch": 5.001846722068329, "grad_norm": 3.188286781311035, "learning_rate": 8.124e-06, "loss": 0.5124, "step": 2711 }, { "epoch": 5.003693444136657, "grad_norm": 1.9085744619369507, "learning_rate": 8.126999999999999e-06, "loss": 0.3812, "step": 2712 }, { "epoch": 5.0055401662049865, "grad_norm": 1.2450673580169678, "learning_rate": 8.13e-06, "loss": 0.2909, "step": 2713 }, { "epoch": 5.007386888273315, "grad_norm": 2.713906764984131, "learning_rate": 8.133e-06, "loss": 0.3009, "step": 2714 }, { "epoch": 5.009233610341644, "grad_norm": 1.3698234558105469, "learning_rate": 8.136e-06, "loss": 0.2429, "step": 2715 }, { "epoch": 5.011080332409972, "grad_norm": 1.40462064743042, "learning_rate": 8.139e-06, "loss": 0.1949, "step": 2716 }, { "epoch": 5.012927054478301, "grad_norm": 
1.9252722263336182, "learning_rate": 8.142e-06, "loss": 0.2235, "step": 2717 }, { "epoch": 5.01477377654663, "grad_norm": 1.615817666053772, "learning_rate": 8.145e-06, "loss": 0.3593, "step": 2718 }, { "epoch": 5.016620498614959, "grad_norm": 0.8248971104621887, "learning_rate": 8.148e-06, "loss": 0.164, "step": 2719 }, { "epoch": 5.018467220683287, "grad_norm": 1.431121826171875, "learning_rate": 8.151e-06, "loss": 0.1736, "step": 2720 }, { "epoch": 5.020313942751616, "grad_norm": 2.1033153533935547, "learning_rate": 8.154e-06, "loss": 0.1873, "step": 2721 }, { "epoch": 5.022160664819944, "grad_norm": 0.9570878148078918, "learning_rate": 8.157e-06, "loss": 0.1279, "step": 2722 }, { "epoch": 5.0240073868882735, "grad_norm": 0.952393651008606, "learning_rate": 8.160000000000001e-06, "loss": 0.146, "step": 2723 }, { "epoch": 5.025854108956602, "grad_norm": 1.0486096143722534, "learning_rate": 8.163000000000001e-06, "loss": 0.1244, "step": 2724 }, { "epoch": 5.027700831024931, "grad_norm": 0.9891552329063416, "learning_rate": 8.166e-06, "loss": 0.1664, "step": 2725 }, { "epoch": 5.029547553093259, "grad_norm": 1.0865612030029297, "learning_rate": 8.169e-06, "loss": 0.1581, "step": 2726 }, { "epoch": 5.031394275161588, "grad_norm": 0.7026161551475525, "learning_rate": 8.171999999999999e-06, "loss": 0.0739, "step": 2727 }, { "epoch": 5.033240997229917, "grad_norm": 0.732552170753479, "learning_rate": 8.175e-06, "loss": 0.0691, "step": 2728 }, { "epoch": 5.035087719298246, "grad_norm": 0.8606172204017639, "learning_rate": 8.178e-06, "loss": 0.0759, "step": 2729 }, { "epoch": 5.036934441366574, "grad_norm": 0.9478769898414612, "learning_rate": 8.181e-06, "loss": 0.0805, "step": 2730 }, { "epoch": 5.038781163434903, "grad_norm": 1.0164841413497925, "learning_rate": 8.184e-06, "loss": 0.0735, "step": 2731 }, { "epoch": 5.040627885503231, "grad_norm": 0.8229144811630249, "learning_rate": 8.187e-06, "loss": 0.0715, "step": 2732 }, { "epoch": 5.0424746075715605, "grad_norm": 
0.8915043473243713, "learning_rate": 8.190000000000001e-06, "loss": 0.064, "step": 2733 }, { "epoch": 5.044321329639889, "grad_norm": 0.8569628000259399, "learning_rate": 8.193000000000001e-06, "loss": 0.0529, "step": 2734 }, { "epoch": 5.046168051708218, "grad_norm": 0.8679004311561584, "learning_rate": 8.196e-06, "loss": 0.0498, "step": 2735 }, { "epoch": 5.048014773776547, "grad_norm": 0.686722457408905, "learning_rate": 8.199e-06, "loss": 0.0619, "step": 2736 }, { "epoch": 5.049861495844875, "grad_norm": 0.7008607387542725, "learning_rate": 8.201999999999999e-06, "loss": 0.0639, "step": 2737 }, { "epoch": 5.0517082179132045, "grad_norm": 0.7146738767623901, "learning_rate": 8.205e-06, "loss": 0.0553, "step": 2738 }, { "epoch": 5.053554939981533, "grad_norm": 1.0253753662109375, "learning_rate": 8.208e-06, "loss": 0.0733, "step": 2739 }, { "epoch": 5.055401662049862, "grad_norm": 0.7840259671211243, "learning_rate": 8.211e-06, "loss": 0.0603, "step": 2740 }, { "epoch": 5.05724838411819, "grad_norm": 0.9601883888244629, "learning_rate": 8.214e-06, "loss": 0.0807, "step": 2741 }, { "epoch": 5.059095106186519, "grad_norm": 1.1228169202804565, "learning_rate": 8.217e-06, "loss": 0.0719, "step": 2742 }, { "epoch": 5.0609418282548475, "grad_norm": 0.6802356243133545, "learning_rate": 8.220000000000001e-06, "loss": 0.0376, "step": 2743 }, { "epoch": 5.062788550323177, "grad_norm": 0.7134103775024414, "learning_rate": 8.223e-06, "loss": 0.0496, "step": 2744 }, { "epoch": 5.064635272391505, "grad_norm": 1.4297904968261719, "learning_rate": 8.226e-06, "loss": 0.0489, "step": 2745 }, { "epoch": 5.066481994459834, "grad_norm": 1.171958088874817, "learning_rate": 8.229e-06, "loss": 0.0549, "step": 2746 }, { "epoch": 5.068328716528162, "grad_norm": 0.9930132627487183, "learning_rate": 8.232e-06, "loss": 0.0595, "step": 2747 }, { "epoch": 5.0701754385964914, "grad_norm": 0.6678305864334106, "learning_rate": 8.235000000000002e-06, "loss": 0.0474, "step": 2748 }, { "epoch": 
5.07202216066482, "grad_norm": 1.126110315322876, "learning_rate": 8.238e-06, "loss": 0.0738, "step": 2749 }, { "epoch": 5.073868882733149, "grad_norm": 1.4353430271148682, "learning_rate": 8.241e-06, "loss": 0.0755, "step": 2750 }, { "epoch": 5.075715604801477, "grad_norm": 1.0827726125717163, "learning_rate": 8.244e-06, "loss": 0.0937, "step": 2751 }, { "epoch": 5.077562326869806, "grad_norm": 1.1548527479171753, "learning_rate": 8.246999999999999e-06, "loss": 0.0702, "step": 2752 }, { "epoch": 5.0794090489381345, "grad_norm": 1.123356819152832, "learning_rate": 8.25e-06, "loss": 0.0709, "step": 2753 }, { "epoch": 5.081255771006464, "grad_norm": 1.2064100503921509, "learning_rate": 8.253e-06, "loss": 0.0902, "step": 2754 }, { "epoch": 5.083102493074792, "grad_norm": 1.0698316097259521, "learning_rate": 8.256e-06, "loss": 0.068, "step": 2755 }, { "epoch": 5.084949215143121, "grad_norm": 0.9638137817382812, "learning_rate": 8.259e-06, "loss": 0.0562, "step": 2756 }, { "epoch": 5.086795937211449, "grad_norm": 0.931310772895813, "learning_rate": 8.262e-06, "loss": 0.0577, "step": 2757 }, { "epoch": 5.088642659279778, "grad_norm": 2.2462685108184814, "learning_rate": 8.265000000000001e-06, "loss": 0.0998, "step": 2758 }, { "epoch": 5.090489381348107, "grad_norm": 1.3807369470596313, "learning_rate": 8.268000000000001e-06, "loss": 0.085, "step": 2759 }, { "epoch": 5.092336103416436, "grad_norm": 1.427977204322815, "learning_rate": 8.271000000000001e-06, "loss": 0.079, "step": 2760 }, { "epoch": 5.094182825484765, "grad_norm": 1.4242230653762817, "learning_rate": 8.274e-06, "loss": 0.3582, "step": 2761 }, { "epoch": 5.096029547553093, "grad_norm": 1.1141996383666992, "learning_rate": 8.276999999999999e-06, "loss": 0.3369, "step": 2762 }, { "epoch": 5.097876269621422, "grad_norm": 0.9965702295303345, "learning_rate": 8.28e-06, "loss": 0.3146, "step": 2763 }, { "epoch": 5.099722991689751, "grad_norm": 1.071621298789978, "learning_rate": 8.283e-06, "loss": 0.3233, "step": 
2764 }, { "epoch": 5.10156971375808, "grad_norm": 0.9454147219657898, "learning_rate": 8.286e-06, "loss": 0.2085, "step": 2765 }, { "epoch": 5.103416435826408, "grad_norm": 2.2237796783447266, "learning_rate": 8.289e-06, "loss": 0.2989, "step": 2766 }, { "epoch": 5.105263157894737, "grad_norm": 1.0580756664276123, "learning_rate": 8.292e-06, "loss": 0.2079, "step": 2767 }, { "epoch": 5.107109879963065, "grad_norm": 0.9704204201698303, "learning_rate": 8.295000000000001e-06, "loss": 0.2233, "step": 2768 }, { "epoch": 5.108956602031395, "grad_norm": 0.8892659544944763, "learning_rate": 8.298000000000001e-06, "loss": 0.1835, "step": 2769 }, { "epoch": 5.110803324099723, "grad_norm": 1.0301181077957153, "learning_rate": 8.301e-06, "loss": 0.2356, "step": 2770 }, { "epoch": 5.112650046168052, "grad_norm": 1.033872127532959, "learning_rate": 8.304e-06, "loss": 0.1523, "step": 2771 }, { "epoch": 5.11449676823638, "grad_norm": 0.9132250547409058, "learning_rate": 8.307e-06, "loss": 0.137, "step": 2772 }, { "epoch": 5.116343490304709, "grad_norm": 0.7538077235221863, "learning_rate": 8.310000000000002e-06, "loss": 0.1323, "step": 2773 }, { "epoch": 5.118190212373038, "grad_norm": 0.8518770933151245, "learning_rate": 8.313e-06, "loss": 0.1159, "step": 2774 }, { "epoch": 5.120036934441367, "grad_norm": 0.7349260449409485, "learning_rate": 8.316e-06, "loss": 0.0928, "step": 2775 }, { "epoch": 5.121883656509695, "grad_norm": 0.7747098803520203, "learning_rate": 8.319e-06, "loss": 0.1134, "step": 2776 }, { "epoch": 5.123730378578024, "grad_norm": 0.9408161640167236, "learning_rate": 8.322e-06, "loss": 0.1117, "step": 2777 }, { "epoch": 5.125577100646352, "grad_norm": 0.9748533368110657, "learning_rate": 8.325e-06, "loss": 0.1154, "step": 2778 }, { "epoch": 5.127423822714682, "grad_norm": 0.6912077069282532, "learning_rate": 8.328e-06, "loss": 0.0687, "step": 2779 }, { "epoch": 5.12927054478301, "grad_norm": 0.6826411485671997, "learning_rate": 8.331e-06, "loss": 0.066, "step": 
2780 }, { "epoch": 5.131117266851339, "grad_norm": 0.971698522567749, "learning_rate": 8.334e-06, "loss": 0.1017, "step": 2781 }, { "epoch": 5.132963988919667, "grad_norm": 0.8723594546318054, "learning_rate": 8.337e-06, "loss": 0.0594, "step": 2782 }, { "epoch": 5.134810710987996, "grad_norm": 0.8311214447021484, "learning_rate": 8.340000000000001e-06, "loss": 0.0874, "step": 2783 }, { "epoch": 5.136657433056325, "grad_norm": 0.7077476978302002, "learning_rate": 8.343000000000001e-06, "loss": 0.0512, "step": 2784 }, { "epoch": 5.138504155124654, "grad_norm": 0.9080737233161926, "learning_rate": 8.346000000000001e-06, "loss": 0.0783, "step": 2785 }, { "epoch": 5.140350877192983, "grad_norm": 0.6995786428451538, "learning_rate": 8.349e-06, "loss": 0.0563, "step": 2786 }, { "epoch": 5.142197599261311, "grad_norm": 0.9258413314819336, "learning_rate": 8.351999999999999e-06, "loss": 0.0596, "step": 2787 }, { "epoch": 5.14404432132964, "grad_norm": 1.9468938112258911, "learning_rate": 8.355e-06, "loss": 0.0477, "step": 2788 }, { "epoch": 5.1458910433979685, "grad_norm": 0.7890531420707703, "learning_rate": 8.358e-06, "loss": 0.0528, "step": 2789 }, { "epoch": 5.147737765466298, "grad_norm": 0.7697831988334656, "learning_rate": 8.361e-06, "loss": 0.0517, "step": 2790 }, { "epoch": 5.149584487534626, "grad_norm": 0.9314472675323486, "learning_rate": 8.364e-06, "loss": 0.0489, "step": 2791 }, { "epoch": 5.151431209602955, "grad_norm": 0.9839686751365662, "learning_rate": 8.367e-06, "loss": 0.0615, "step": 2792 }, { "epoch": 5.153277931671283, "grad_norm": 0.616888701915741, "learning_rate": 8.370000000000001e-06, "loss": 0.0448, "step": 2793 }, { "epoch": 5.1551246537396125, "grad_norm": 1.0835751295089722, "learning_rate": 8.373000000000001e-06, "loss": 0.048, "step": 2794 }, { "epoch": 5.156971375807941, "grad_norm": 1.1965879201889038, "learning_rate": 8.376e-06, "loss": 0.0938, "step": 2795 }, { "epoch": 5.15881809787627, "grad_norm": 0.7703157663345337, 
"learning_rate": 8.379e-06, "loss": 0.0645, "step": 2796 }, { "epoch": 5.160664819944598, "grad_norm": 0.8498938679695129, "learning_rate": 8.382e-06, "loss": 0.0523, "step": 2797 }, { "epoch": 5.162511542012927, "grad_norm": 0.8971265554428101, "learning_rate": 8.385e-06, "loss": 0.0513, "step": 2798 }, { "epoch": 5.1643582640812555, "grad_norm": 0.8701927661895752, "learning_rate": 8.388e-06, "loss": 0.0518, "step": 2799 }, { "epoch": 5.166204986149585, "grad_norm": 0.7444379329681396, "learning_rate": 8.391e-06, "loss": 0.0429, "step": 2800 }, { "epoch": 5.168051708217913, "grad_norm": 1.0041154623031616, "learning_rate": 8.394e-06, "loss": 0.0661, "step": 2801 }, { "epoch": 5.169898430286242, "grad_norm": 0.8491671681404114, "learning_rate": 8.397e-06, "loss": 0.0752, "step": 2802 }, { "epoch": 5.17174515235457, "grad_norm": 0.8518207669258118, "learning_rate": 8.400000000000001e-06, "loss": 0.0624, "step": 2803 }, { "epoch": 5.1735918744228995, "grad_norm": 0.9227288961410522, "learning_rate": 8.403e-06, "loss": 0.0538, "step": 2804 }, { "epoch": 5.175438596491228, "grad_norm": 0.8876579999923706, "learning_rate": 8.406e-06, "loss": 0.0474, "step": 2805 }, { "epoch": 5.177285318559557, "grad_norm": 2.571634531021118, "learning_rate": 8.409e-06, "loss": 0.0456, "step": 2806 }, { "epoch": 5.179132040627885, "grad_norm": 0.7877750396728516, "learning_rate": 8.412e-06, "loss": 0.0517, "step": 2807 }, { "epoch": 5.180978762696214, "grad_norm": 1.27826988697052, "learning_rate": 8.415000000000002e-06, "loss": 0.109, "step": 2808 }, { "epoch": 5.1828254847645425, "grad_norm": 1.2227567434310913, "learning_rate": 8.418000000000001e-06, "loss": 0.0814, "step": 2809 }, { "epoch": 5.184672206832872, "grad_norm": 1.4295302629470825, "learning_rate": 8.421000000000001e-06, "loss": 0.0789, "step": 2810 }, { "epoch": 5.186518928901201, "grad_norm": 2.2919962406158447, "learning_rate": 8.424e-06, "loss": 0.3895, "step": 2811 }, { "epoch": 5.188365650969529, "grad_norm": 
1.3867071866989136, "learning_rate": 8.426999999999999e-06, "loss": 0.3742, "step": 2812 }, { "epoch": 5.190212373037858, "grad_norm": 1.6625173091888428, "learning_rate": 8.43e-06, "loss": 0.4138, "step": 2813 }, { "epoch": 5.1920590951061865, "grad_norm": 1.237267255783081, "learning_rate": 8.433e-06, "loss": 0.3137, "step": 2814 }, { "epoch": 5.193905817174516, "grad_norm": 1.9351158142089844, "learning_rate": 8.436e-06, "loss": 0.3284, "step": 2815 }, { "epoch": 5.195752539242844, "grad_norm": 0.9686079621315002, "learning_rate": 8.439e-06, "loss": 0.249, "step": 2816 }, { "epoch": 5.197599261311173, "grad_norm": 0.9176819324493408, "learning_rate": 8.442e-06, "loss": 0.2494, "step": 2817 }, { "epoch": 5.199445983379501, "grad_norm": 1.0931744575500488, "learning_rate": 8.445e-06, "loss": 0.2237, "step": 2818 }, { "epoch": 5.20129270544783, "grad_norm": 1.0970206260681152, "learning_rate": 8.448000000000001e-06, "loss": 0.2043, "step": 2819 }, { "epoch": 5.203139427516159, "grad_norm": 1.022572636604309, "learning_rate": 8.451000000000001e-06, "loss": 0.2139, "step": 2820 }, { "epoch": 5.204986149584488, "grad_norm": 1.361502766609192, "learning_rate": 8.454e-06, "loss": 0.1792, "step": 2821 }, { "epoch": 5.206832871652816, "grad_norm": 1.2826979160308838, "learning_rate": 8.457e-06, "loss": 0.2567, "step": 2822 }, { "epoch": 5.208679593721145, "grad_norm": 1.0315555334091187, "learning_rate": 8.459999999999999e-06, "loss": 0.1461, "step": 2823 }, { "epoch": 5.2105263157894735, "grad_norm": 1.2108699083328247, "learning_rate": 8.463e-06, "loss": 0.1435, "step": 2824 }, { "epoch": 5.212373037857803, "grad_norm": 1.0025715827941895, "learning_rate": 8.466e-06, "loss": 0.1251, "step": 2825 }, { "epoch": 5.214219759926131, "grad_norm": 0.7117714881896973, "learning_rate": 8.469e-06, "loss": 0.1303, "step": 2826 }, { "epoch": 5.21606648199446, "grad_norm": 0.5328357219696045, "learning_rate": 8.472e-06, "loss": 0.0406, "step": 2827 }, { "epoch": 5.217913204062788, 
"grad_norm": 1.2464380264282227, "learning_rate": 8.475e-06, "loss": 0.1048, "step": 2828 }, { "epoch": 5.219759926131117, "grad_norm": 0.9050692915916443, "learning_rate": 8.478e-06, "loss": 0.1046, "step": 2829 }, { "epoch": 5.221606648199446, "grad_norm": 0.7401684522628784, "learning_rate": 8.481e-06, "loss": 0.0854, "step": 2830 }, { "epoch": 5.223453370267775, "grad_norm": 0.5116982460021973, "learning_rate": 8.484e-06, "loss": 0.0457, "step": 2831 }, { "epoch": 5.225300092336103, "grad_norm": 0.7157394886016846, "learning_rate": 8.487e-06, "loss": 0.0564, "step": 2832 }, { "epoch": 5.227146814404432, "grad_norm": 1.1383388042449951, "learning_rate": 8.49e-06, "loss": 0.0675, "step": 2833 }, { "epoch": 5.22899353647276, "grad_norm": 0.8179400563240051, "learning_rate": 8.493000000000002e-06, "loss": 0.0565, "step": 2834 }, { "epoch": 5.23084025854109, "grad_norm": 0.9336811304092407, "learning_rate": 8.496e-06, "loss": 0.0617, "step": 2835 }, { "epoch": 5.232686980609419, "grad_norm": 0.8065788745880127, "learning_rate": 8.499e-06, "loss": 0.039, "step": 2836 }, { "epoch": 5.234533702677747, "grad_norm": 0.8431143760681152, "learning_rate": 8.502e-06, "loss": 0.0662, "step": 2837 }, { "epoch": 5.236380424746076, "grad_norm": 0.9685320854187012, "learning_rate": 8.504999999999999e-06, "loss": 0.056, "step": 2838 }, { "epoch": 5.238227146814404, "grad_norm": 1.681692123413086, "learning_rate": 8.508e-06, "loss": 0.0725, "step": 2839 }, { "epoch": 5.2400738688827335, "grad_norm": 0.757462739944458, "learning_rate": 8.511e-06, "loss": 0.0623, "step": 2840 }, { "epoch": 5.241920590951062, "grad_norm": 1.066636323928833, "learning_rate": 8.514e-06, "loss": 0.0556, "step": 2841 }, { "epoch": 5.243767313019391, "grad_norm": 0.7465166449546814, "learning_rate": 8.517e-06, "loss": 0.0405, "step": 2842 }, { "epoch": 5.245614035087719, "grad_norm": 0.8556094765663147, "learning_rate": 8.52e-06, "loss": 0.0518, "step": 2843 }, { "epoch": 5.247460757156048, "grad_norm": 
0.8630668520927429, "learning_rate": 8.523000000000001e-06, "loss": 0.0534, "step": 2844 }, { "epoch": 5.249307479224377, "grad_norm": 0.9147830605506897, "learning_rate": 8.526000000000001e-06, "loss": 0.0472, "step": 2845 }, { "epoch": 5.251154201292706, "grad_norm": 0.936911404132843, "learning_rate": 8.529e-06, "loss": 0.0474, "step": 2846 }, { "epoch": 5.253000923361034, "grad_norm": 0.8163319230079651, "learning_rate": 8.532e-06, "loss": 0.0652, "step": 2847 }, { "epoch": 5.254847645429363, "grad_norm": 0.9939672946929932, "learning_rate": 8.534999999999999e-06, "loss": 0.0633, "step": 2848 }, { "epoch": 5.256694367497691, "grad_norm": 1.0096458196640015, "learning_rate": 8.538e-06, "loss": 0.0553, "step": 2849 }, { "epoch": 5.2585410895660205, "grad_norm": 1.5377824306488037, "learning_rate": 8.541e-06, "loss": 0.0983, "step": 2850 }, { "epoch": 5.260387811634349, "grad_norm": 0.9376013875007629, "learning_rate": 8.544e-06, "loss": 0.0647, "step": 2851 }, { "epoch": 5.262234533702678, "grad_norm": 1.0527558326721191, "learning_rate": 8.547e-06, "loss": 0.1096, "step": 2852 }, { "epoch": 5.264081255771006, "grad_norm": 0.8554616570472717, "learning_rate": 8.55e-06, "loss": 0.0608, "step": 2853 }, { "epoch": 5.265927977839335, "grad_norm": 0.9472962021827698, "learning_rate": 8.553000000000001e-06, "loss": 0.0812, "step": 2854 }, { "epoch": 5.267774699907664, "grad_norm": 0.8088082075119019, "learning_rate": 8.556e-06, "loss": 0.0601, "step": 2855 }, { "epoch": 5.269621421975993, "grad_norm": 0.9127931594848633, "learning_rate": 8.559e-06, "loss": 0.0693, "step": 2856 }, { "epoch": 5.271468144044321, "grad_norm": 0.7901133894920349, "learning_rate": 8.562e-06, "loss": 0.0515, "step": 2857 }, { "epoch": 5.27331486611265, "grad_norm": 1.4003362655639648, "learning_rate": 8.565e-06, "loss": 0.0728, "step": 2858 }, { "epoch": 5.275161588180978, "grad_norm": 1.0627663135528564, "learning_rate": 8.568000000000002e-06, "loss": 0.0749, "step": 2859 }, { "epoch": 
5.2770083102493075, "grad_norm": 2.196826457977295, "learning_rate": 8.571e-06, "loss": 0.0995, "step": 2860 }, { "epoch": 5.278855032317637, "grad_norm": 1.8018981218338013, "learning_rate": 8.574e-06, "loss": 0.3984, "step": 2861 }, { "epoch": 5.280701754385965, "grad_norm": 1.298303246498108, "learning_rate": 8.577e-06, "loss": 0.4234, "step": 2862 }, { "epoch": 5.282548476454294, "grad_norm": 1.3109056949615479, "learning_rate": 8.58e-06, "loss": 0.3235, "step": 2863 }, { "epoch": 5.284395198522622, "grad_norm": 1.5986768007278442, "learning_rate": 8.583e-06, "loss": 0.282, "step": 2864 }, { "epoch": 5.286241920590951, "grad_norm": 1.181259274482727, "learning_rate": 8.586e-06, "loss": 0.2529, "step": 2865 }, { "epoch": 5.28808864265928, "grad_norm": 0.8804054260253906, "learning_rate": 8.589e-06, "loss": 0.2063, "step": 2866 }, { "epoch": 5.289935364727609, "grad_norm": 1.4291982650756836, "learning_rate": 8.592e-06, "loss": 0.2953, "step": 2867 }, { "epoch": 5.291782086795937, "grad_norm": 1.3200314044952393, "learning_rate": 8.595e-06, "loss": 0.2368, "step": 2868 }, { "epoch": 5.293628808864266, "grad_norm": 1.1014959812164307, "learning_rate": 8.598000000000001e-06, "loss": 0.2103, "step": 2869 }, { "epoch": 5.2954755309325945, "grad_norm": 0.9992939233779907, "learning_rate": 8.601000000000001e-06, "loss": 0.1908, "step": 2870 }, { "epoch": 5.297322253000924, "grad_norm": 0.9212896227836609, "learning_rate": 8.604000000000001e-06, "loss": 0.1221, "step": 2871 }, { "epoch": 5.299168975069252, "grad_norm": 0.8088216185569763, "learning_rate": 8.606999999999999e-06, "loss": 0.1182, "step": 2872 }, { "epoch": 5.301015697137581, "grad_norm": 0.789671778678894, "learning_rate": 8.609999999999999e-06, "loss": 0.1241, "step": 2873 }, { "epoch": 5.302862419205909, "grad_norm": 1.0368704795837402, "learning_rate": 8.613e-06, "loss": 0.0944, "step": 2874 }, { "epoch": 5.304709141274238, "grad_norm": 1.2252488136291504, "learning_rate": 8.616e-06, "loss": 0.1023, 
"step": 2875 }, { "epoch": 5.306555863342567, "grad_norm": 1.0052070617675781, "learning_rate": 8.619e-06, "loss": 0.0693, "step": 2876 }, { "epoch": 5.308402585410896, "grad_norm": 0.6709212064743042, "learning_rate": 8.622e-06, "loss": 0.0674, "step": 2877 }, { "epoch": 5.310249307479224, "grad_norm": 0.5491957664489746, "learning_rate": 8.625e-06, "loss": 0.0614, "step": 2878 }, { "epoch": 5.312096029547553, "grad_norm": 0.8416413068771362, "learning_rate": 8.628000000000001e-06, "loss": 0.0663, "step": 2879 }, { "epoch": 5.3139427516158815, "grad_norm": 0.8481470346450806, "learning_rate": 8.631000000000001e-06, "loss": 0.0751, "step": 2880 }, { "epoch": 5.315789473684211, "grad_norm": 1.0984277725219727, "learning_rate": 8.634e-06, "loss": 0.0693, "step": 2881 }, { "epoch": 5.317636195752539, "grad_norm": 0.9302229285240173, "learning_rate": 8.637e-06, "loss": 0.0926, "step": 2882 }, { "epoch": 5.319482917820868, "grad_norm": 0.49322235584259033, "learning_rate": 8.64e-06, "loss": 0.0357, "step": 2883 }, { "epoch": 5.321329639889196, "grad_norm": 0.730798602104187, "learning_rate": 8.643e-06, "loss": 0.0418, "step": 2884 }, { "epoch": 5.323176361957525, "grad_norm": 0.689988374710083, "learning_rate": 8.646e-06, "loss": 0.069, "step": 2885 }, { "epoch": 5.325023084025855, "grad_norm": 0.9584289789199829, "learning_rate": 8.649e-06, "loss": 0.0699, "step": 2886 }, { "epoch": 5.326869806094183, "grad_norm": 0.8632835745811462, "learning_rate": 8.652e-06, "loss": 0.0638, "step": 2887 }, { "epoch": 5.328716528162512, "grad_norm": 1.3212529420852661, "learning_rate": 8.655e-06, "loss": 0.0458, "step": 2888 }, { "epoch": 5.33056325023084, "grad_norm": 0.7729990482330322, "learning_rate": 8.658e-06, "loss": 0.0644, "step": 2889 }, { "epoch": 5.332409972299169, "grad_norm": 1.917777180671692, "learning_rate": 8.661e-06, "loss": 0.1455, "step": 2890 }, { "epoch": 5.334256694367498, "grad_norm": 1.0701407194137573, "learning_rate": 8.664e-06, "loss": 0.0824, "step": 
2891 }, { "epoch": 5.336103416435827, "grad_norm": 0.8621686697006226, "learning_rate": 8.667e-06, "loss": 0.0592, "step": 2892 }, { "epoch": 5.337950138504155, "grad_norm": 0.9155268669128418, "learning_rate": 8.67e-06, "loss": 0.0749, "step": 2893 }, { "epoch": 5.339796860572484, "grad_norm": 0.8312194347381592, "learning_rate": 8.673000000000001e-06, "loss": 0.0484, "step": 2894 }, { "epoch": 5.341643582640812, "grad_norm": 1.4102102518081665, "learning_rate": 8.676000000000001e-06, "loss": 0.0683, "step": 2895 }, { "epoch": 5.3434903047091415, "grad_norm": 1.0904110670089722, "learning_rate": 8.679000000000001e-06, "loss": 0.0677, "step": 2896 }, { "epoch": 5.34533702677747, "grad_norm": 0.8273285627365112, "learning_rate": 8.682e-06, "loss": 0.0669, "step": 2897 }, { "epoch": 5.347183748845799, "grad_norm": 1.3562661409378052, "learning_rate": 8.684999999999999e-06, "loss": 0.0727, "step": 2898 }, { "epoch": 5.349030470914127, "grad_norm": 1.7254983186721802, "learning_rate": 8.688e-06, "loss": 0.065, "step": 2899 }, { "epoch": 5.350877192982456, "grad_norm": 0.9011834859848022, "learning_rate": 8.691e-06, "loss": 0.0575, "step": 2900 }, { "epoch": 5.352723915050785, "grad_norm": 0.9402887225151062, "learning_rate": 8.694e-06, "loss": 0.0535, "step": 2901 }, { "epoch": 5.354570637119114, "grad_norm": 0.9309573769569397, "learning_rate": 8.697e-06, "loss": 0.0476, "step": 2902 }, { "epoch": 5.356417359187442, "grad_norm": 0.9273201823234558, "learning_rate": 8.7e-06, "loss": 0.0502, "step": 2903 }, { "epoch": 5.358264081255771, "grad_norm": 0.978259265422821, "learning_rate": 8.703000000000001e-06, "loss": 0.0678, "step": 2904 }, { "epoch": 5.360110803324099, "grad_norm": 0.7556988000869751, "learning_rate": 8.706000000000001e-06, "loss": 0.0551, "step": 2905 }, { "epoch": 5.3619575253924285, "grad_norm": 0.8013013601303101, "learning_rate": 8.709e-06, "loss": 0.0589, "step": 2906 }, { "epoch": 5.363804247460757, "grad_norm": 0.9943859577178955, 
"learning_rate": 8.712e-06, "loss": 0.0761, "step": 2907 }, { "epoch": 5.365650969529086, "grad_norm": 1.665634036064148, "learning_rate": 8.715e-06, "loss": 0.0771, "step": 2908 }, { "epoch": 5.367497691597414, "grad_norm": 1.0174927711486816, "learning_rate": 8.718e-06, "loss": 0.0794, "step": 2909 }, { "epoch": 5.369344413665743, "grad_norm": 0.9980137944221497, "learning_rate": 8.721e-06, "loss": 0.0642, "step": 2910 }, { "epoch": 5.3711911357340725, "grad_norm": 1.8771740198135376, "learning_rate": 8.724e-06, "loss": 0.4153, "step": 2911 }, { "epoch": 5.373037857802401, "grad_norm": 2.773858070373535, "learning_rate": 8.727e-06, "loss": 0.4126, "step": 2912 }, { "epoch": 5.374884579870729, "grad_norm": 1.1791237592697144, "learning_rate": 8.73e-06, "loss": 0.286, "step": 2913 }, { "epoch": 5.376731301939058, "grad_norm": 1.0450700521469116, "learning_rate": 8.733000000000001e-06, "loss": 0.2737, "step": 2914 }, { "epoch": 5.378578024007387, "grad_norm": 0.9821802973747253, "learning_rate": 8.736e-06, "loss": 0.3204, "step": 2915 }, { "epoch": 5.3804247460757155, "grad_norm": 2.578831911087036, "learning_rate": 8.739e-06, "loss": 0.3898, "step": 2916 }, { "epoch": 5.382271468144045, "grad_norm": 1.2257970571517944, "learning_rate": 8.742e-06, "loss": 0.2362, "step": 2917 }, { "epoch": 5.384118190212373, "grad_norm": 1.0278574228286743, "learning_rate": 8.745e-06, "loss": 0.1894, "step": 2918 }, { "epoch": 5.385964912280702, "grad_norm": 1.2053768634796143, "learning_rate": 8.748000000000002e-06, "loss": 0.2346, "step": 2919 }, { "epoch": 5.38781163434903, "grad_norm": 1.3017722368240356, "learning_rate": 8.751000000000001e-06, "loss": 0.2011, "step": 2920 }, { "epoch": 5.3896583564173595, "grad_norm": 0.7116005420684814, "learning_rate": 8.754e-06, "loss": 0.1469, "step": 2921 }, { "epoch": 5.391505078485688, "grad_norm": 2.64703106880188, "learning_rate": 8.757e-06, "loss": 0.1881, "step": 2922 }, { "epoch": 5.393351800554017, "grad_norm": 1.7918483018875122, 
"learning_rate": 8.759999999999999e-06, "loss": 0.1773, "step": 2923 }, { "epoch": 5.395198522622345, "grad_norm": 0.6470117568969727, "learning_rate": 8.763e-06, "loss": 0.0867, "step": 2924 }, { "epoch": 5.397045244690674, "grad_norm": 2.316588878631592, "learning_rate": 8.766e-06, "loss": 0.193, "step": 2925 }, { "epoch": 5.3988919667590025, "grad_norm": 1.2009514570236206, "learning_rate": 8.769e-06, "loss": 0.102, "step": 2926 }, { "epoch": 5.400738688827332, "grad_norm": 1.333717703819275, "learning_rate": 8.772e-06, "loss": 0.0831, "step": 2927 }, { "epoch": 5.40258541089566, "grad_norm": 1.4752955436706543, "learning_rate": 8.775e-06, "loss": 0.075, "step": 2928 }, { "epoch": 5.404432132963989, "grad_norm": 0.9960635900497437, "learning_rate": 8.778000000000001e-06, "loss": 0.0757, "step": 2929 }, { "epoch": 5.406278855032317, "grad_norm": 0.7506101131439209, "learning_rate": 8.781000000000001e-06, "loss": 0.0787, "step": 2930 }, { "epoch": 5.4081255771006465, "grad_norm": 0.7151127457618713, "learning_rate": 8.784000000000001e-06, "loss": 0.0586, "step": 2931 }, { "epoch": 5.409972299168975, "grad_norm": 0.9094399213790894, "learning_rate": 8.787e-06, "loss": 0.072, "step": 2932 }, { "epoch": 5.411819021237304, "grad_norm": 0.6951301097869873, "learning_rate": 8.79e-06, "loss": 0.0861, "step": 2933 }, { "epoch": 5.413665743305632, "grad_norm": 0.6631410717964172, "learning_rate": 8.793e-06, "loss": 0.0524, "step": 2934 }, { "epoch": 5.415512465373961, "grad_norm": 0.6533665060997009, "learning_rate": 8.796e-06, "loss": 0.0517, "step": 2935 }, { "epoch": 5.41735918744229, "grad_norm": 0.8334203362464905, "learning_rate": 8.799e-06, "loss": 0.0591, "step": 2936 }, { "epoch": 5.419205909510619, "grad_norm": 0.786499559879303, "learning_rate": 8.802e-06, "loss": 0.061, "step": 2937 }, { "epoch": 5.421052631578947, "grad_norm": 0.8949714303016663, "learning_rate": 8.805e-06, "loss": 0.076, "step": 2938 }, { "epoch": 5.422899353647276, "grad_norm": 
0.8435794711112976, "learning_rate": 8.808000000000001e-06, "loss": 0.0617, "step": 2939 }, { "epoch": 5.424746075715605, "grad_norm": 0.7329889535903931, "learning_rate": 8.811000000000001e-06, "loss": 0.0463, "step": 2940 }, { "epoch": 5.426592797783933, "grad_norm": 0.694934070110321, "learning_rate": 8.814e-06, "loss": 0.0467, "step": 2941 }, { "epoch": 5.428439519852263, "grad_norm": 0.8145605325698853, "learning_rate": 8.817e-06, "loss": 0.0664, "step": 2942 }, { "epoch": 5.430286241920591, "grad_norm": 0.9637972712516785, "learning_rate": 8.82e-06, "loss": 0.0718, "step": 2943 }, { "epoch": 5.43213296398892, "grad_norm": 0.6921423673629761, "learning_rate": 8.823e-06, "loss": 0.0524, "step": 2944 }, { "epoch": 5.433979686057248, "grad_norm": 0.7768904566764832, "learning_rate": 8.826000000000002e-06, "loss": 0.0479, "step": 2945 }, { "epoch": 5.435826408125577, "grad_norm": 0.941554069519043, "learning_rate": 8.829e-06, "loss": 0.0358, "step": 2946 }, { "epoch": 5.437673130193906, "grad_norm": 1.102005124092102, "learning_rate": 8.832e-06, "loss": 0.0757, "step": 2947 }, { "epoch": 5.439519852262235, "grad_norm": 1.0594245195388794, "learning_rate": 8.835e-06, "loss": 0.0408, "step": 2948 }, { "epoch": 5.441366574330563, "grad_norm": 0.686547577381134, "learning_rate": 8.837999999999999e-06, "loss": 0.0511, "step": 2949 }, { "epoch": 5.443213296398892, "grad_norm": 0.8735717535018921, "learning_rate": 8.841e-06, "loss": 0.0484, "step": 2950 }, { "epoch": 5.44506001846722, "grad_norm": 0.9363495707511902, "learning_rate": 8.844e-06, "loss": 0.0892, "step": 2951 }, { "epoch": 5.44690674053555, "grad_norm": 0.9038994908332825, "learning_rate": 8.847e-06, "loss": 0.0466, "step": 2952 }, { "epoch": 5.448753462603878, "grad_norm": 1.1139678955078125, "learning_rate": 8.85e-06, "loss": 0.0843, "step": 2953 }, { "epoch": 5.450600184672207, "grad_norm": 1.3504257202148438, "learning_rate": 8.853e-06, "loss": 0.0634, "step": 2954 }, { "epoch": 5.452446906740535, 
"grad_norm": 0.7875360250473022, "learning_rate": 8.856000000000001e-06, "loss": 0.0557, "step": 2955 }, { "epoch": 5.454293628808864, "grad_norm": 1.0293179750442505, "learning_rate": 8.859000000000001e-06, "loss": 0.0516, "step": 2956 }, { "epoch": 5.456140350877193, "grad_norm": 1.1746110916137695, "learning_rate": 8.862000000000001e-06, "loss": 0.0404, "step": 2957 }, { "epoch": 5.457987072945522, "grad_norm": 0.681563138961792, "learning_rate": 8.864999999999999e-06, "loss": 0.0412, "step": 2958 }, { "epoch": 5.45983379501385, "grad_norm": 1.1411263942718506, "learning_rate": 8.867999999999999e-06, "loss": 0.0845, "step": 2959 }, { "epoch": 5.461680517082179, "grad_norm": 1.3232712745666504, "learning_rate": 8.871e-06, "loss": 0.1214, "step": 2960 }, { "epoch": 5.463527239150508, "grad_norm": 1.824684739112854, "learning_rate": 8.874e-06, "loss": 0.4566, "step": 2961 }, { "epoch": 5.465373961218837, "grad_norm": 1.305968165397644, "learning_rate": 8.877e-06, "loss": 0.4121, "step": 2962 }, { "epoch": 5.467220683287165, "grad_norm": 1.9621754884719849, "learning_rate": 8.88e-06, "loss": 0.252, "step": 2963 }, { "epoch": 5.469067405355494, "grad_norm": 1.6577662229537964, "learning_rate": 8.883e-06, "loss": 0.306, "step": 2964 }, { "epoch": 5.470914127423823, "grad_norm": 1.061056137084961, "learning_rate": 8.886000000000001e-06, "loss": 0.2434, "step": 2965 }, { "epoch": 5.472760849492151, "grad_norm": 1.2487244606018066, "learning_rate": 8.889e-06, "loss": 0.2658, "step": 2966 }, { "epoch": 5.4746075715604805, "grad_norm": 1.0350834131240845, "learning_rate": 8.892e-06, "loss": 0.2689, "step": 2967 }, { "epoch": 5.476454293628809, "grad_norm": 0.811021089553833, "learning_rate": 8.895e-06, "loss": 0.1559, "step": 2968 }, { "epoch": 5.478301015697138, "grad_norm": 0.9796880483627319, "learning_rate": 8.898e-06, "loss": 0.3147, "step": 2969 }, { "epoch": 5.480147737765466, "grad_norm": 1.0987402200698853, "learning_rate": 8.901e-06, "loss": 0.1979, "step": 2970 
}, { "epoch": 5.481994459833795, "grad_norm": 1.0269370079040527, "learning_rate": 8.904e-06, "loss": 0.1935, "step": 2971 }, { "epoch": 5.4838411819021236, "grad_norm": 0.9793119430541992, "learning_rate": 8.907e-06, "loss": 0.1526, "step": 2972 }, { "epoch": 5.485687903970453, "grad_norm": 0.7819326519966125, "learning_rate": 8.91e-06, "loss": 0.1513, "step": 2973 }, { "epoch": 5.487534626038781, "grad_norm": 0.9465290307998657, "learning_rate": 8.913e-06, "loss": 0.1285, "step": 2974 }, { "epoch": 5.48938134810711, "grad_norm": 0.9836941957473755, "learning_rate": 8.916e-06, "loss": 0.1069, "step": 2975 }, { "epoch": 5.491228070175438, "grad_norm": 0.9329361915588379, "learning_rate": 8.919e-06, "loss": 0.1254, "step": 2976 }, { "epoch": 5.4930747922437675, "grad_norm": 1.0424612760543823, "learning_rate": 8.922e-06, "loss": 0.107, "step": 2977 }, { "epoch": 5.494921514312096, "grad_norm": 1.5464701652526855, "learning_rate": 8.925e-06, "loss": 0.0687, "step": 2978 }, { "epoch": 5.496768236380425, "grad_norm": 0.681675136089325, "learning_rate": 8.928e-06, "loss": 0.0608, "step": 2979 }, { "epoch": 5.498614958448753, "grad_norm": 1.3586539030075073, "learning_rate": 8.931000000000001e-06, "loss": 0.1181, "step": 2980 }, { "epoch": 5.500461680517082, "grad_norm": 1.1351392269134521, "learning_rate": 8.934000000000001e-06, "loss": 0.0602, "step": 2981 }, { "epoch": 5.5023084025854105, "grad_norm": 0.9219081401824951, "learning_rate": 8.937000000000001e-06, "loss": 0.098, "step": 2982 }, { "epoch": 5.50415512465374, "grad_norm": 1.2950011491775513, "learning_rate": 8.939999999999999e-06, "loss": 0.0701, "step": 2983 }, { "epoch": 5.506001846722068, "grad_norm": 0.7642313838005066, "learning_rate": 8.942999999999999e-06, "loss": 0.0474, "step": 2984 }, { "epoch": 5.507848568790397, "grad_norm": 1.5005654096603394, "learning_rate": 8.946e-06, "loss": 0.0863, "step": 2985 }, { "epoch": 5.509695290858726, "grad_norm": 0.8310704231262207, "learning_rate": 8.949e-06, 
"loss": 0.0736, "step": 2986 }, { "epoch": 5.5115420129270545, "grad_norm": 0.8422238826751709, "learning_rate": 8.952e-06, "loss": 0.0833, "step": 2987 }, { "epoch": 5.513388734995383, "grad_norm": 0.650684654712677, "learning_rate": 8.955e-06, "loss": 0.0531, "step": 2988 }, { "epoch": 5.515235457063712, "grad_norm": 0.9870924949645996, "learning_rate": 8.958e-06, "loss": 0.0594, "step": 2989 }, { "epoch": 5.517082179132041, "grad_norm": 1.1698861122131348, "learning_rate": 8.961000000000001e-06, "loss": 0.0559, "step": 2990 }, { "epoch": 5.518928901200369, "grad_norm": 0.7865745425224304, "learning_rate": 8.964000000000001e-06, "loss": 0.0697, "step": 2991 }, { "epoch": 5.520775623268698, "grad_norm": 1.2937891483306885, "learning_rate": 8.967e-06, "loss": 0.0656, "step": 2992 }, { "epoch": 5.522622345337027, "grad_norm": 1.5481828451156616, "learning_rate": 8.97e-06, "loss": 0.063, "step": 2993 }, { "epoch": 5.524469067405356, "grad_norm": 1.1445446014404297, "learning_rate": 8.973e-06, "loss": 0.0693, "step": 2994 }, { "epoch": 5.526315789473684, "grad_norm": 1.5518786907196045, "learning_rate": 8.976e-06, "loss": 0.0556, "step": 2995 }, { "epoch": 5.528162511542013, "grad_norm": 0.5828351974487305, "learning_rate": 8.979e-06, "loss": 0.0341, "step": 2996 }, { "epoch": 5.5300092336103415, "grad_norm": 0.7857339978218079, "learning_rate": 8.982e-06, "loss": 0.0552, "step": 2997 }, { "epoch": 5.531855955678671, "grad_norm": 0.8015343546867371, "learning_rate": 8.985e-06, "loss": 0.0654, "step": 2998 }, { "epoch": 5.533702677746999, "grad_norm": 1.580649495124817, "learning_rate": 8.988e-06, "loss": 0.0526, "step": 2999 }, { "epoch": 5.535549399815328, "grad_norm": 1.2285726070404053, "learning_rate": 8.991e-06, "loss": 0.0604, "step": 3000 }, { "epoch": 5.535549399815328, "eval_cer": 0.13466573346812868, "eval_loss": 0.42114710807800293, "eval_runtime": 16.2471, "eval_samples_per_second": 18.711, "eval_steps_per_second": 0.615, "eval_wer": 0.4785111281657713, 
"step": 3000 }, { "epoch": 5.537396121883656, "grad_norm": 2.3396427631378174, "learning_rate": 8.994e-06, "loss": 0.0483, "step": 3001 }, { "epoch": 5.539242843951985, "grad_norm": 1.1838312149047852, "learning_rate": 8.997e-06, "loss": 0.0643, "step": 3002 }, { "epoch": 5.541089566020314, "grad_norm": 1.489532470703125, "learning_rate": 9e-06, "loss": 0.0414, "step": 3003 }, { "epoch": 5.542936288088643, "grad_norm": 0.9960378408432007, "learning_rate": 9.003e-06, "loss": 0.056, "step": 3004 }, { "epoch": 5.544783010156971, "grad_norm": 1.023935079574585, "learning_rate": 9.006000000000002e-06, "loss": 0.0585, "step": 3005 }, { "epoch": 5.5466297322253, "grad_norm": 1.3380311727523804, "learning_rate": 9.009000000000001e-06, "loss": 0.0695, "step": 3006 }, { "epoch": 5.5484764542936285, "grad_norm": 1.2113758325576782, "learning_rate": 9.012e-06, "loss": 0.0856, "step": 3007 }, { "epoch": 5.550323176361958, "grad_norm": 1.1438714265823364, "learning_rate": 9.015e-06, "loss": 0.0752, "step": 3008 }, { "epoch": 5.552169898430286, "grad_norm": 1.388740062713623, "learning_rate": 9.017999999999999e-06, "loss": 0.0974, "step": 3009 }, { "epoch": 5.554016620498615, "grad_norm": 1.1810368299484253, "learning_rate": 9.021e-06, "loss": 0.1307, "step": 3010 }, { "epoch": 5.555863342566944, "grad_norm": 3.170240640640259, "learning_rate": 9.024e-06, "loss": 0.5096, "step": 3011 }, { "epoch": 5.557710064635272, "grad_norm": 1.2501941919326782, "learning_rate": 9.027e-06, "loss": 0.3023, "step": 3012 }, { "epoch": 5.559556786703601, "grad_norm": 1.2062792778015137, "learning_rate": 9.03e-06, "loss": 0.3017, "step": 3013 }, { "epoch": 5.56140350877193, "grad_norm": 0.9803247451782227, "learning_rate": 9.033e-06, "loss": 0.2733, "step": 3014 }, { "epoch": 5.563250230840259, "grad_norm": 0.9785440564155579, "learning_rate": 9.036000000000001e-06, "loss": 0.2386, "step": 3015 }, { "epoch": 5.565096952908587, "grad_norm": 2.5354113578796387, "learning_rate": 9.039000000000001e-06, 
"loss": 0.2437, "step": 3016 }, { "epoch": 5.566943674976916, "grad_norm": 1.2615872621536255, "learning_rate": 9.042e-06, "loss": 0.2878, "step": 3017 }, { "epoch": 5.568790397045245, "grad_norm": 0.8574416637420654, "learning_rate": 9.045e-06, "loss": 0.1853, "step": 3018 }, { "epoch": 5.570637119113574, "grad_norm": 1.2679378986358643, "learning_rate": 9.048e-06, "loss": 0.2943, "step": 3019 }, { "epoch": 5.572483841181902, "grad_norm": 1.3468197584152222, "learning_rate": 9.051e-06, "loss": 0.2309, "step": 3020 }, { "epoch": 5.574330563250231, "grad_norm": 0.8612178564071655, "learning_rate": 9.054e-06, "loss": 0.1631, "step": 3021 }, { "epoch": 5.576177285318559, "grad_norm": 5.9170989990234375, "learning_rate": 9.057e-06, "loss": 0.2011, "step": 3022 }, { "epoch": 5.5780240073868885, "grad_norm": 1.277970552444458, "learning_rate": 9.06e-06, "loss": 0.2202, "step": 3023 }, { "epoch": 5.579870729455217, "grad_norm": 0.7107061147689819, "learning_rate": 9.063e-06, "loss": 0.069, "step": 3024 }, { "epoch": 5.581717451523546, "grad_norm": 0.9380223751068115, "learning_rate": 9.066000000000001e-06, "loss": 0.0869, "step": 3025 }, { "epoch": 5.583564173591874, "grad_norm": 1.3844622373580933, "learning_rate": 9.069e-06, "loss": 0.0685, "step": 3026 }, { "epoch": 5.585410895660203, "grad_norm": 0.7406550645828247, "learning_rate": 9.072e-06, "loss": 0.056, "step": 3027 }, { "epoch": 5.587257617728532, "grad_norm": 1.171863317489624, "learning_rate": 9.075e-06, "loss": 0.1099, "step": 3028 }, { "epoch": 5.589104339796861, "grad_norm": 0.9505486488342285, "learning_rate": 9.078e-06, "loss": 0.0639, "step": 3029 }, { "epoch": 5.590951061865189, "grad_norm": 0.8913169503211975, "learning_rate": 9.081000000000002e-06, "loss": 0.1375, "step": 3030 }, { "epoch": 5.592797783933518, "grad_norm": 1.2923994064331055, "learning_rate": 9.084000000000001e-06, "loss": 0.0891, "step": 3031 }, { "epoch": 5.594644506001846, "grad_norm": 1.3994338512420654, "learning_rate": 9.087e-06, 
"loss": 0.0706, "step": 3032 }, { "epoch": 5.5964912280701755, "grad_norm": 0.9740058779716492, "learning_rate": 9.09e-06, "loss": 0.0825, "step": 3033 }, { "epoch": 5.598337950138504, "grad_norm": 0.6192094683647156, "learning_rate": 9.093e-06, "loss": 0.0527, "step": 3034 }, { "epoch": 5.600184672206833, "grad_norm": 0.9898630976676941, "learning_rate": 9.096e-06, "loss": 0.059, "step": 3035 }, { "epoch": 5.602031394275162, "grad_norm": 0.9869064688682556, "learning_rate": 9.099e-06, "loss": 0.0926, "step": 3036 }, { "epoch": 5.60387811634349, "grad_norm": 0.775285542011261, "learning_rate": 9.102e-06, "loss": 0.0603, "step": 3037 }, { "epoch": 5.605724838411819, "grad_norm": 1.0720051527023315, "learning_rate": 9.105e-06, "loss": 0.0603, "step": 3038 }, { "epoch": 5.607571560480148, "grad_norm": 2.360222101211548, "learning_rate": 9.108e-06, "loss": 0.0702, "step": 3039 }, { "epoch": 5.609418282548477, "grad_norm": 0.9469456672668457, "learning_rate": 9.111000000000001e-06, "loss": 0.0443, "step": 3040 }, { "epoch": 5.611265004616805, "grad_norm": 1.5419272184371948, "learning_rate": 9.114000000000001e-06, "loss": 0.1102, "step": 3041 }, { "epoch": 5.613111726685134, "grad_norm": 0.6679257154464722, "learning_rate": 9.117000000000001e-06, "loss": 0.0591, "step": 3042 }, { "epoch": 5.6149584487534625, "grad_norm": 0.9657368063926697, "learning_rate": 9.12e-06, "loss": 0.0735, "step": 3043 }, { "epoch": 5.616805170821792, "grad_norm": 0.9724937677383423, "learning_rate": 9.122999999999999e-06, "loss": 0.0496, "step": 3044 }, { "epoch": 5.61865189289012, "grad_norm": 1.4401867389678955, "learning_rate": 9.126e-06, "loss": 0.0722, "step": 3045 }, { "epoch": 5.620498614958449, "grad_norm": 1.0665438175201416, "learning_rate": 9.129e-06, "loss": 0.0656, "step": 3046 }, { "epoch": 5.622345337026777, "grad_norm": 0.6612203121185303, "learning_rate": 9.132e-06, "loss": 0.0366, "step": 3047 }, { "epoch": 5.624192059095106, "grad_norm": 1.3835797309875488, "learning_rate": 
9.135e-06, "loss": 0.0824, "step": 3048 }, { "epoch": 5.626038781163435, "grad_norm": 0.8385351896286011, "learning_rate": 9.138e-06, "loss": 0.0671, "step": 3049 }, { "epoch": 5.627885503231764, "grad_norm": 0.7732231020927429, "learning_rate": 9.141000000000001e-06, "loss": 0.0547, "step": 3050 }, { "epoch": 5.629732225300092, "grad_norm": 0.8967553973197937, "learning_rate": 9.144000000000001e-06, "loss": 0.04, "step": 3051 }, { "epoch": 5.631578947368421, "grad_norm": 0.8584074378013611, "learning_rate": 9.147e-06, "loss": 0.0607, "step": 3052 }, { "epoch": 5.6334256694367495, "grad_norm": 0.8602099418640137, "learning_rate": 9.15e-06, "loss": 0.052, "step": 3053 }, { "epoch": 5.635272391505079, "grad_norm": 1.226857304573059, "learning_rate": 9.153e-06, "loss": 0.0616, "step": 3054 }, { "epoch": 5.637119113573407, "grad_norm": 1.038644552230835, "learning_rate": 9.156000000000002e-06, "loss": 0.0821, "step": 3055 }, { "epoch": 5.638965835641736, "grad_norm": 1.0842891931533813, "learning_rate": 9.159e-06, "loss": 0.0758, "step": 3056 }, { "epoch": 5.640812557710064, "grad_norm": 0.8205846548080444, "learning_rate": 9.162e-06, "loss": 0.0582, "step": 3057 }, { "epoch": 5.642659279778393, "grad_norm": 1.210604190826416, "learning_rate": 9.165e-06, "loss": 0.0614, "step": 3058 }, { "epoch": 5.644506001846722, "grad_norm": 0.8624764084815979, "learning_rate": 9.168e-06, "loss": 0.0662, "step": 3059 }, { "epoch": 5.646352723915051, "grad_norm": 1.716399073600769, "learning_rate": 9.171e-06, "loss": 0.1065, "step": 3060 }, { "epoch": 5.64819944598338, "grad_norm": 1.7060513496398926, "learning_rate": 9.174e-06, "loss": 0.3817, "step": 3061 }, { "epoch": 5.650046168051708, "grad_norm": 1.4844878911972046, "learning_rate": 9.177e-06, "loss": 0.3893, "step": 3062 }, { "epoch": 5.6518928901200365, "grad_norm": 1.199528694152832, "learning_rate": 9.18e-06, "loss": 0.2969, "step": 3063 }, { "epoch": 5.653739612188366, "grad_norm": 1.4310998916625977, "learning_rate": 
9.183e-06, "loss": 0.2899, "step": 3064 }, { "epoch": 5.655586334256695, "grad_norm": 1.5166258811950684, "learning_rate": 9.186000000000001e-06, "loss": 0.363, "step": 3065 }, { "epoch": 5.657433056325023, "grad_norm": 1.2674273252487183, "learning_rate": 9.189000000000001e-06, "loss": 0.2383, "step": 3066 }, { "epoch": 5.659279778393352, "grad_norm": 1.0732433795928955, "learning_rate": 9.192000000000001e-06, "loss": 0.177, "step": 3067 }, { "epoch": 5.66112650046168, "grad_norm": 1.4909396171569824, "learning_rate": 9.195000000000001e-06, "loss": 0.254, "step": 3068 }, { "epoch": 5.66297322253001, "grad_norm": 0.8169516324996948, "learning_rate": 9.197999999999999e-06, "loss": 0.1878, "step": 3069 }, { "epoch": 5.664819944598338, "grad_norm": 0.9201124310493469, "learning_rate": 9.200999999999999e-06, "loss": 0.1467, "step": 3070 }, { "epoch": 5.666666666666667, "grad_norm": 2.299650192260742, "learning_rate": 9.204e-06, "loss": 0.1529, "step": 3071 }, { "epoch": 5.668513388734995, "grad_norm": 1.8124525547027588, "learning_rate": 9.207e-06, "loss": 0.1673, "step": 3072 }, { "epoch": 5.670360110803324, "grad_norm": 1.476135015487671, "learning_rate": 9.21e-06, "loss": 0.2229, "step": 3073 }, { "epoch": 5.672206832871653, "grad_norm": 0.6514289379119873, "learning_rate": 9.213e-06, "loss": 0.0952, "step": 3074 }, { "epoch": 5.674053554939982, "grad_norm": 0.6251052618026733, "learning_rate": 9.216e-06, "loss": 0.0813, "step": 3075 }, { "epoch": 5.67590027700831, "grad_norm": 0.7795804738998413, "learning_rate": 9.219000000000001e-06, "loss": 0.0616, "step": 3076 }, { "epoch": 5.677746999076639, "grad_norm": 0.8727070093154907, "learning_rate": 9.222e-06, "loss": 0.0755, "step": 3077 }, { "epoch": 5.679593721144967, "grad_norm": 0.6587457656860352, "learning_rate": 9.225e-06, "loss": 0.0533, "step": 3078 }, { "epoch": 5.6814404432132966, "grad_norm": 0.7225388288497925, "learning_rate": 9.228e-06, "loss": 0.0547, "step": 3079 }, { "epoch": 5.683287165281625, 
"grad_norm": 2.320605516433716, "learning_rate": 9.231e-06, "loss": 0.0504, "step": 3080 }, { "epoch": 5.685133887349954, "grad_norm": 1.1022893190383911, "learning_rate": 9.234e-06, "loss": 0.0463, "step": 3081 }, { "epoch": 5.686980609418282, "grad_norm": 0.8792228102684021, "learning_rate": 9.237e-06, "loss": 0.0478, "step": 3082 }, { "epoch": 5.688827331486611, "grad_norm": 0.5602882504463196, "learning_rate": 9.24e-06, "loss": 0.0382, "step": 3083 }, { "epoch": 5.69067405355494, "grad_norm": 0.7190514206886292, "learning_rate": 9.243e-06, "loss": 0.0518, "step": 3084 }, { "epoch": 5.692520775623269, "grad_norm": 0.8902875185012817, "learning_rate": 9.246e-06, "loss": 0.0827, "step": 3085 }, { "epoch": 5.694367497691598, "grad_norm": 0.8684836626052856, "learning_rate": 9.249e-06, "loss": 0.06, "step": 3086 }, { "epoch": 5.696214219759926, "grad_norm": 1.0752253532409668, "learning_rate": 9.252e-06, "loss": 0.0586, "step": 3087 }, { "epoch": 5.698060941828254, "grad_norm": 1.1792585849761963, "learning_rate": 9.255e-06, "loss": 0.132, "step": 3088 }, { "epoch": 5.6999076638965835, "grad_norm": 1.2840609550476074, "learning_rate": 9.258e-06, "loss": 0.1063, "step": 3089 }, { "epoch": 5.701754385964913, "grad_norm": 0.7193743586540222, "learning_rate": 9.261e-06, "loss": 0.0474, "step": 3090 }, { "epoch": 5.703601108033241, "grad_norm": 0.9113737344741821, "learning_rate": 9.264000000000001e-06, "loss": 0.0869, "step": 3091 }, { "epoch": 5.70544783010157, "grad_norm": 0.9547917246818542, "learning_rate": 9.267000000000001e-06, "loss": 0.0489, "step": 3092 }, { "epoch": 5.707294552169898, "grad_norm": 3.226956605911255, "learning_rate": 9.27e-06, "loss": 0.0482, "step": 3093 }, { "epoch": 5.7091412742382275, "grad_norm": 0.8785438537597656, "learning_rate": 9.272999999999999e-06, "loss": 0.0475, "step": 3094 }, { "epoch": 5.710987996306556, "grad_norm": 0.9990173578262329, "learning_rate": 9.275999999999999e-06, "loss": 0.057, "step": 3095 }, { "epoch": 
5.712834718374885, "grad_norm": 0.8454262018203735, "learning_rate": 9.279e-06, "loss": 0.0549, "step": 3096 }, { "epoch": 5.714681440443213, "grad_norm": 1.1370716094970703, "learning_rate": 9.282e-06, "loss": 0.0599, "step": 3097 }, { "epoch": 5.716528162511542, "grad_norm": 1.1761515140533447, "learning_rate": 9.285e-06, "loss": 0.0587, "step": 3098 }, { "epoch": 5.7183748845798705, "grad_norm": 1.1789077520370483, "learning_rate": 9.288e-06, "loss": 0.0788, "step": 3099 }, { "epoch": 5.7202216066482, "grad_norm": 3.341630458831787, "learning_rate": 9.291e-06, "loss": 0.0766, "step": 3100 }, { "epoch": 5.722068328716528, "grad_norm": 1.213159441947937, "learning_rate": 9.294000000000001e-06, "loss": 0.0727, "step": 3101 }, { "epoch": 5.723915050784857, "grad_norm": 0.9439771175384521, "learning_rate": 9.297000000000001e-06, "loss": 0.0622, "step": 3102 }, { "epoch": 5.725761772853185, "grad_norm": 0.8545637726783752, "learning_rate": 9.3e-06, "loss": 0.0484, "step": 3103 }, { "epoch": 5.7276084949215145, "grad_norm": 0.8875400424003601, "learning_rate": 9.303e-06, "loss": 0.0614, "step": 3104 }, { "epoch": 5.729455216989843, "grad_norm": 1.552079439163208, "learning_rate": 9.306e-06, "loss": 0.0628, "step": 3105 }, { "epoch": 5.731301939058172, "grad_norm": 1.0208044052124023, "learning_rate": 9.309e-06, "loss": 0.0628, "step": 3106 }, { "epoch": 5.7331486611265, "grad_norm": 1.533717155456543, "learning_rate": 9.312e-06, "loss": 0.0511, "step": 3107 }, { "epoch": 5.734995383194829, "grad_norm": 1.1809885501861572, "learning_rate": 9.315e-06, "loss": 0.0588, "step": 3108 }, { "epoch": 5.7368421052631575, "grad_norm": 0.9960464239120483, "learning_rate": 9.318e-06, "loss": 0.0603, "step": 3109 }, { "epoch": 5.738688827331487, "grad_norm": 0.9623493552207947, "learning_rate": 9.321e-06, "loss": 0.063, "step": 3110 }, { "epoch": 5.740535549399816, "grad_norm": 4.995608329772949, "learning_rate": 9.324000000000001e-06, "loss": 0.4545, "step": 3111 }, { "epoch": 
5.742382271468144, "grad_norm": 1.443178415298462, "learning_rate": 9.327e-06, "loss": 0.3614, "step": 3112 }, { "epoch": 5.744228993536472, "grad_norm": 3.596264362335205, "learning_rate": 9.33e-06, "loss": 0.3435, "step": 3113 }, { "epoch": 5.7460757156048015, "grad_norm": 2.392505168914795, "learning_rate": 9.333e-06, "loss": 0.4267, "step": 3114 }, { "epoch": 5.747922437673131, "grad_norm": 1.9041658639907837, "learning_rate": 9.336e-06, "loss": 0.3021, "step": 3115 }, { "epoch": 5.749769159741459, "grad_norm": 1.9096932411193848, "learning_rate": 9.339000000000002e-06, "loss": 0.3205, "step": 3116 }, { "epoch": 5.751615881809788, "grad_norm": 3.222970485687256, "learning_rate": 9.342000000000001e-06, "loss": 0.2452, "step": 3117 }, { "epoch": 5.753462603878116, "grad_norm": 1.1893322467803955, "learning_rate": 9.345e-06, "loss": 0.2122, "step": 3118 }, { "epoch": 5.755309325946445, "grad_norm": 1.0547327995300293, "learning_rate": 9.348e-06, "loss": 0.2251, "step": 3119 }, { "epoch": 5.757156048014774, "grad_norm": 0.8825430274009705, "learning_rate": 9.350999999999999e-06, "loss": 0.1535, "step": 3120 }, { "epoch": 5.759002770083103, "grad_norm": 0.8509583473205566, "learning_rate": 9.354e-06, "loss": 0.1839, "step": 3121 }, { "epoch": 5.760849492151431, "grad_norm": 0.9790893793106079, "learning_rate": 9.357e-06, "loss": 0.1476, "step": 3122 }, { "epoch": 5.76269621421976, "grad_norm": 0.9421710968017578, "learning_rate": 9.36e-06, "loss": 0.1617, "step": 3123 }, { "epoch": 5.7645429362880884, "grad_norm": 0.9089346528053284, "learning_rate": 9.363e-06, "loss": 0.1796, "step": 3124 }, { "epoch": 5.766389658356418, "grad_norm": 1.144519567489624, "learning_rate": 9.366e-06, "loss": 0.0917, "step": 3125 }, { "epoch": 5.768236380424746, "grad_norm": 0.8784078359603882, "learning_rate": 9.369000000000001e-06, "loss": 0.0894, "step": 3126 }, { "epoch": 5.770083102493075, "grad_norm": 1.1179689168930054, "learning_rate": 9.372000000000001e-06, "loss": 0.0808, 
"step": 3127 }, { "epoch": 5.771929824561403, "grad_norm": 0.7009519338607788, "learning_rate": 9.375000000000001e-06, "loss": 0.0506, "step": 3128 }, { "epoch": 5.773776546629732, "grad_norm": 0.6712589263916016, "learning_rate": 9.378e-06, "loss": 0.0589, "step": 3129 }, { "epoch": 5.775623268698061, "grad_norm": 0.6814177632331848, "learning_rate": 9.380999999999999e-06, "loss": 0.0581, "step": 3130 }, { "epoch": 5.77746999076639, "grad_norm": 0.7124582529067993, "learning_rate": 9.384e-06, "loss": 0.0508, "step": 3131 }, { "epoch": 5.779316712834718, "grad_norm": 0.8895642161369324, "learning_rate": 9.387e-06, "loss": 0.0597, "step": 3132 }, { "epoch": 5.781163434903047, "grad_norm": 0.9447484016418457, "learning_rate": 9.39e-06, "loss": 0.0638, "step": 3133 }, { "epoch": 5.783010156971375, "grad_norm": 0.7462688684463501, "learning_rate": 9.393e-06, "loss": 0.0559, "step": 3134 }, { "epoch": 5.784856879039705, "grad_norm": 0.95284503698349, "learning_rate": 9.396e-06, "loss": 0.0563, "step": 3135 }, { "epoch": 5.786703601108034, "grad_norm": 0.926132321357727, "learning_rate": 9.399000000000001e-06, "loss": 0.0669, "step": 3136 }, { "epoch": 5.788550323176362, "grad_norm": 1.1923774480819702, "learning_rate": 9.402e-06, "loss": 0.0613, "step": 3137 }, { "epoch": 5.79039704524469, "grad_norm": 0.9798671007156372, "learning_rate": 9.405e-06, "loss": 0.0487, "step": 3138 }, { "epoch": 5.792243767313019, "grad_norm": 0.7260217666625977, "learning_rate": 9.408e-06, "loss": 0.0465, "step": 3139 }, { "epoch": 5.7940904893813485, "grad_norm": 0.6956988573074341, "learning_rate": 9.411e-06, "loss": 0.0556, "step": 3140 }, { "epoch": 5.795937211449677, "grad_norm": 0.7585822939872742, "learning_rate": 9.414000000000002e-06, "loss": 0.05, "step": 3141 }, { "epoch": 5.797783933518006, "grad_norm": 0.8594281077384949, "learning_rate": 9.417e-06, "loss": 0.067, "step": 3142 }, { "epoch": 5.799630655586334, "grad_norm": 0.8605832457542419, "learning_rate": 9.42e-06, "loss": 
0.0506, "step": 3143 }, { "epoch": 5.801477377654663, "grad_norm": 0.8311311602592468, "learning_rate": 9.423e-06, "loss": 0.0428, "step": 3144 }, { "epoch": 5.803324099722992, "grad_norm": 0.9315934181213379, "learning_rate": 9.426e-06, "loss": 0.0493, "step": 3145 }, { "epoch": 5.805170821791321, "grad_norm": 1.0022386312484741, "learning_rate": 9.429e-06, "loss": 0.0762, "step": 3146 }, { "epoch": 5.807017543859649, "grad_norm": 0.9364701509475708, "learning_rate": 9.432e-06, "loss": 0.0718, "step": 3147 }, { "epoch": 5.808864265927978, "grad_norm": 1.5895075798034668, "learning_rate": 9.435e-06, "loss": 0.0597, "step": 3148 }, { "epoch": 5.810710987996306, "grad_norm": 0.939860999584198, "learning_rate": 9.438e-06, "loss": 0.0628, "step": 3149 }, { "epoch": 5.8125577100646355, "grad_norm": 1.6381874084472656, "learning_rate": 9.441e-06, "loss": 0.0701, "step": 3150 }, { "epoch": 5.814404432132964, "grad_norm": 1.853114128112793, "learning_rate": 9.444000000000001e-06, "loss": 0.0668, "step": 3151 }, { "epoch": 5.816251154201293, "grad_norm": 1.0566539764404297, "learning_rate": 9.447000000000001e-06, "loss": 0.0642, "step": 3152 }, { "epoch": 5.818097876269621, "grad_norm": 0.9682995676994324, "learning_rate": 9.450000000000001e-06, "loss": 0.0559, "step": 3153 }, { "epoch": 5.81994459833795, "grad_norm": 0.8305920362472534, "learning_rate": 9.453e-06, "loss": 0.0639, "step": 3154 }, { "epoch": 5.821791320406279, "grad_norm": 0.718386173248291, "learning_rate": 9.455999999999999e-06, "loss": 0.0377, "step": 3155 }, { "epoch": 5.823638042474608, "grad_norm": 0.9657241702079773, "learning_rate": 9.459e-06, "loss": 0.0737, "step": 3156 }, { "epoch": 5.825484764542936, "grad_norm": 0.8440473079681396, "learning_rate": 9.462e-06, "loss": 0.0526, "step": 3157 }, { "epoch": 5.827331486611265, "grad_norm": 1.2559155225753784, "learning_rate": 9.465e-06, "loss": 0.0827, "step": 3158 }, { "epoch": 5.829178208679593, "grad_norm": 1.968029499053955, "learning_rate": 
9.468e-06, "loss": 0.076, "step": 3159 }, { "epoch": 5.8310249307479225, "grad_norm": 1.369443655014038, "learning_rate": 9.471e-06, "loss": 0.087, "step": 3160 }, { "epoch": 5.832871652816252, "grad_norm": 2.196585178375244, "learning_rate": 9.474000000000001e-06, "loss": 0.4043, "step": 3161 }, { "epoch": 5.83471837488458, "grad_norm": 1.450124740600586, "learning_rate": 9.477000000000001e-06, "loss": 0.3129, "step": 3162 }, { "epoch": 5.836565096952908, "grad_norm": 1.131670594215393, "learning_rate": 9.48e-06, "loss": 0.3256, "step": 3163 }, { "epoch": 5.838411819021237, "grad_norm": 1.5835055112838745, "learning_rate": 9.483e-06, "loss": 0.2832, "step": 3164 }, { "epoch": 5.840258541089566, "grad_norm": 1.0356972217559814, "learning_rate": 9.486e-06, "loss": 0.2298, "step": 3165 }, { "epoch": 5.842105263157895, "grad_norm": 1.202141523361206, "learning_rate": 9.489000000000002e-06, "loss": 0.3123, "step": 3166 }, { "epoch": 5.843951985226224, "grad_norm": 1.4132212400436401, "learning_rate": 9.492e-06, "loss": 0.2334, "step": 3167 }, { "epoch": 5.845798707294552, "grad_norm": 1.1922186613082886, "learning_rate": 9.495e-06, "loss": 0.1989, "step": 3168 }, { "epoch": 5.847645429362881, "grad_norm": 1.1139531135559082, "learning_rate": 9.498e-06, "loss": 0.1859, "step": 3169 }, { "epoch": 5.8494921514312095, "grad_norm": 1.1097437143325806, "learning_rate": 9.501e-06, "loss": 0.1994, "step": 3170 }, { "epoch": 5.851338873499539, "grad_norm": 1.480333685874939, "learning_rate": 9.504e-06, "loss": 0.2172, "step": 3171 }, { "epoch": 5.853185595567867, "grad_norm": 1.7595967054367065, "learning_rate": 9.507e-06, "loss": 0.1161, "step": 3172 }, { "epoch": 5.855032317636196, "grad_norm": 0.9520134329795837, "learning_rate": 9.51e-06, "loss": 0.1698, "step": 3173 }, { "epoch": 5.856879039704524, "grad_norm": 0.9602885246276855, "learning_rate": 9.513e-06, "loss": 0.1021, "step": 3174 }, { "epoch": 5.858725761772853, "grad_norm": 1.421912670135498, "learning_rate": 
9.516e-06, "loss": 0.11, "step": 3175 }, { "epoch": 5.860572483841182, "grad_norm": 0.5686153769493103, "learning_rate": 9.519000000000002e-06, "loss": 0.0552, "step": 3176 }, { "epoch": 5.862419205909511, "grad_norm": 0.7969242334365845, "learning_rate": 9.522000000000001e-06, "loss": 0.0772, "step": 3177 }, { "epoch": 5.864265927977839, "grad_norm": 0.5964579582214355, "learning_rate": 9.525000000000001e-06, "loss": 0.055, "step": 3178 }, { "epoch": 5.866112650046168, "grad_norm": 1.263533353805542, "learning_rate": 9.528e-06, "loss": 0.079, "step": 3179 }, { "epoch": 5.8679593721144965, "grad_norm": 1.914730191230774, "learning_rate": 9.530999999999999e-06, "loss": 0.0612, "step": 3180 }, { "epoch": 5.869806094182826, "grad_norm": 0.6636876463890076, "learning_rate": 9.534e-06, "loss": 0.051, "step": 3181 }, { "epoch": 5.871652816251154, "grad_norm": 1.0478947162628174, "learning_rate": 9.537e-06, "loss": 0.0555, "step": 3182 }, { "epoch": 5.873499538319483, "grad_norm": 0.65780109167099, "learning_rate": 9.54e-06, "loss": 0.0431, "step": 3183 }, { "epoch": 5.875346260387811, "grad_norm": 0.979624330997467, "learning_rate": 9.543e-06, "loss": 0.0671, "step": 3184 }, { "epoch": 5.87719298245614, "grad_norm": 0.8512119054794312, "learning_rate": 9.546e-06, "loss": 0.0714, "step": 3185 }, { "epoch": 5.87903970452447, "grad_norm": 0.7912101745605469, "learning_rate": 9.549000000000001e-06, "loss": 0.0641, "step": 3186 }, { "epoch": 5.880886426592798, "grad_norm": 0.670253574848175, "learning_rate": 9.552000000000001e-06, "loss": 0.0504, "step": 3187 }, { "epoch": 5.882733148661126, "grad_norm": 0.7148275971412659, "learning_rate": 9.555e-06, "loss": 0.0643, "step": 3188 }, { "epoch": 5.884579870729455, "grad_norm": 0.9922288060188293, "learning_rate": 9.558e-06, "loss": 0.0554, "step": 3189 }, { "epoch": 5.886426592797784, "grad_norm": 0.6634912490844727, "learning_rate": 9.561e-06, "loss": 0.0611, "step": 3190 }, { "epoch": 5.888273314866113, "grad_norm": 
1.007731556892395, "learning_rate": 9.564e-06, "loss": 0.0503, "step": 3191 }, { "epoch": 5.890120036934442, "grad_norm": 0.8372678160667419, "learning_rate": 9.567e-06, "loss": 0.0423, "step": 3192 }, { "epoch": 5.89196675900277, "grad_norm": 0.6674429774284363, "learning_rate": 9.57e-06, "loss": 0.0487, "step": 3193 }, { "epoch": 5.893813481071099, "grad_norm": 0.8218016624450684, "learning_rate": 9.573e-06, "loss": 0.0496, "step": 3194 }, { "epoch": 5.895660203139427, "grad_norm": 0.6602591872215271, "learning_rate": 9.576e-06, "loss": 0.0436, "step": 3195 }, { "epoch": 5.8975069252077565, "grad_norm": 0.7583157420158386, "learning_rate": 9.579e-06, "loss": 0.0731, "step": 3196 }, { "epoch": 5.899353647276085, "grad_norm": 0.7678322792053223, "learning_rate": 9.582e-06, "loss": 0.0645, "step": 3197 }, { "epoch": 5.901200369344414, "grad_norm": 1.025427222251892, "learning_rate": 9.585e-06, "loss": 0.0662, "step": 3198 }, { "epoch": 5.903047091412742, "grad_norm": 0.934942901134491, "learning_rate": 9.588e-06, "loss": 0.0684, "step": 3199 }, { "epoch": 5.904893813481071, "grad_norm": 1.957726001739502, "learning_rate": 9.591e-06, "loss": 0.0433, "step": 3200 }, { "epoch": 5.9067405355494, "grad_norm": 0.7961621880531311, "learning_rate": 9.594e-06, "loss": 0.0569, "step": 3201 }, { "epoch": 5.908587257617729, "grad_norm": 1.2947304248809814, "learning_rate": 9.597000000000001e-06, "loss": 0.0738, "step": 3202 }, { "epoch": 5.910433979686057, "grad_norm": 1.006197214126587, "learning_rate": 9.600000000000001e-06, "loss": 0.0679, "step": 3203 }, { "epoch": 5.912280701754386, "grad_norm": 0.720492422580719, "learning_rate": 9.603e-06, "loss": 0.0399, "step": 3204 }, { "epoch": 5.914127423822714, "grad_norm": 0.9666870832443237, "learning_rate": 9.606e-06, "loss": 0.0717, "step": 3205 }, { "epoch": 5.9159741458910435, "grad_norm": 1.069618821144104, "learning_rate": 9.608999999999999e-06, "loss": 0.0657, "step": 3206 }, { "epoch": 5.917820867959372, "grad_norm": 
0.7088072299957275, "learning_rate": 9.612e-06, "loss": 0.0436, "step": 3207 }, { "epoch": 5.919667590027701, "grad_norm": 0.7209672331809998, "learning_rate": 9.615e-06, "loss": 0.0622, "step": 3208 }, { "epoch": 5.921514312096029, "grad_norm": 1.0887486934661865, "learning_rate": 9.618e-06, "loss": 0.0417, "step": 3209 }, { "epoch": 5.923361034164358, "grad_norm": 1.448907494544983, "learning_rate": 9.621e-06, "loss": 0.1023, "step": 3210 }, { "epoch": 5.9252077562326875, "grad_norm": 2.245453119277954, "learning_rate": 9.624e-06, "loss": 0.3862, "step": 3211 }, { "epoch": 5.927054478301016, "grad_norm": 1.4048337936401367, "learning_rate": 9.627000000000001e-06, "loss": 0.3284, "step": 3212 }, { "epoch": 5.928901200369344, "grad_norm": 1.3502554893493652, "learning_rate": 9.630000000000001e-06, "loss": 0.2569, "step": 3213 }, { "epoch": 5.930747922437673, "grad_norm": 1.6020257472991943, "learning_rate": 9.633e-06, "loss": 0.3036, "step": 3214 }, { "epoch": 5.932594644506002, "grad_norm": 1.1737459897994995, "learning_rate": 9.636e-06, "loss": 0.2589, "step": 3215 }, { "epoch": 5.9344413665743305, "grad_norm": 1.5264931917190552, "learning_rate": 9.638999999999999e-06, "loss": 0.1982, "step": 3216 }, { "epoch": 5.93628808864266, "grad_norm": 1.0091787576675415, "learning_rate": 9.642e-06, "loss": 0.241, "step": 3217 }, { "epoch": 5.938134810710988, "grad_norm": 1.1220636367797852, "learning_rate": 9.645e-06, "loss": 0.2123, "step": 3218 }, { "epoch": 5.939981532779317, "grad_norm": 1.072777271270752, "learning_rate": 9.648e-06, "loss": 0.155, "step": 3219 }, { "epoch": 5.941828254847645, "grad_norm": 1.2503160238265991, "learning_rate": 9.651e-06, "loss": 0.2424, "step": 3220 }, { "epoch": 5.9436749769159745, "grad_norm": 2.789108991622925, "learning_rate": 9.654e-06, "loss": 0.1637, "step": 3221 }, { "epoch": 5.945521698984303, "grad_norm": 0.9609374403953552, "learning_rate": 9.657000000000001e-06, "loss": 0.1563, "step": 3222 }, { "epoch": 5.947368421052632, 
"grad_norm": 3.0387258529663086, "learning_rate": 9.66e-06, "loss": 0.0993, "step": 3223 }, { "epoch": 5.94921514312096, "grad_norm": 0.9588826298713684, "learning_rate": 9.663e-06, "loss": 0.0646, "step": 3224 }, { "epoch": 5.951061865189289, "grad_norm": 1.6367828845977783, "learning_rate": 9.666e-06, "loss": 0.073, "step": 3225 }, { "epoch": 5.9529085872576175, "grad_norm": 0.6142951250076294, "learning_rate": 9.669e-06, "loss": 0.0714, "step": 3226 }, { "epoch": 5.954755309325947, "grad_norm": 1.2287521362304688, "learning_rate": 9.672000000000002e-06, "loss": 0.11, "step": 3227 }, { "epoch": 5.956602031394275, "grad_norm": 1.094412088394165, "learning_rate": 9.675e-06, "loss": 0.0484, "step": 3228 }, { "epoch": 5.958448753462604, "grad_norm": 0.4351859986782074, "learning_rate": 9.678e-06, "loss": 0.0596, "step": 3229 }, { "epoch": 5.960295475530932, "grad_norm": 0.7879501581192017, "learning_rate": 9.681e-06, "loss": 0.0579, "step": 3230 }, { "epoch": 5.9621421975992615, "grad_norm": 0.7965291142463684, "learning_rate": 9.683999999999999e-06, "loss": 0.0707, "step": 3231 }, { "epoch": 5.96398891966759, "grad_norm": 0.8367858529090881, "learning_rate": 9.687e-06, "loss": 0.0547, "step": 3232 }, { "epoch": 5.965835641735919, "grad_norm": 0.7109565138816833, "learning_rate": 9.69e-06, "loss": 0.0538, "step": 3233 }, { "epoch": 5.967682363804247, "grad_norm": 0.7826809287071228, "learning_rate": 9.693e-06, "loss": 0.0403, "step": 3234 }, { "epoch": 5.969529085872576, "grad_norm": 0.9229626655578613, "learning_rate": 9.696e-06, "loss": 0.1065, "step": 3235 }, { "epoch": 5.971375807940905, "grad_norm": 0.7484771013259888, "learning_rate": 9.699e-06, "loss": 0.0458, "step": 3236 }, { "epoch": 5.973222530009234, "grad_norm": 0.6975681781768799, "learning_rate": 9.702000000000001e-06, "loss": 0.0373, "step": 3237 }, { "epoch": 5.975069252077562, "grad_norm": 0.6109060645103455, "learning_rate": 9.705000000000001e-06, "loss": 0.0515, "step": 3238 }, { "epoch": 
5.976915974145891, "grad_norm": 1.058773159980774, "learning_rate": 9.708000000000001e-06, "loss": 0.0931, "step": 3239 }, { "epoch": 5.97876269621422, "grad_norm": 1.0566517114639282, "learning_rate": 9.711e-06, "loss": 0.0656, "step": 3240 }, { "epoch": 5.980609418282548, "grad_norm": 1.0419882535934448, "learning_rate": 9.713999999999999e-06, "loss": 0.1314, "step": 3241 }, { "epoch": 5.982456140350877, "grad_norm": 1.0443799495697021, "learning_rate": 9.717e-06, "loss": 0.0758, "step": 3242 }, { "epoch": 5.984302862419206, "grad_norm": 0.9809887409210205, "learning_rate": 9.72e-06, "loss": 0.061, "step": 3243 }, { "epoch": 5.986149584487535, "grad_norm": 0.9536335468292236, "learning_rate": 9.723e-06, "loss": 0.0615, "step": 3244 }, { "epoch": 5.987996306555863, "grad_norm": 1.8318169116973877, "learning_rate": 9.726e-06, "loss": 0.0542, "step": 3245 }, { "epoch": 5.989843028624192, "grad_norm": 1.001519799232483, "learning_rate": 9.729e-06, "loss": 0.0761, "step": 3246 }, { "epoch": 5.991689750692521, "grad_norm": 0.8341793417930603, "learning_rate": 9.732000000000001e-06, "loss": 0.0454, "step": 3247 }, { "epoch": 5.99353647276085, "grad_norm": 1.3690731525421143, "learning_rate": 9.735e-06, "loss": 0.0554, "step": 3248 }, { "epoch": 5.995383194829178, "grad_norm": 1.326370120048523, "learning_rate": 9.738e-06, "loss": 0.1149, "step": 3249 }, { "epoch": 5.997229916897507, "grad_norm": 1.7953455448150635, "learning_rate": 9.741e-06, "loss": 0.1143, "step": 3250 }, { "epoch": 5.999076638965835, "grad_norm": 1.6124578714370728, "learning_rate": 9.744e-06, "loss": 0.0628, "step": 3251 }, { "epoch": 6.0, "grad_norm": 0.8392724394798279, "learning_rate": 9.747000000000002e-06, "loss": 0.0372, "step": 3252 }, { "epoch": 6.001846722068329, "grad_norm": 4.04020881652832, "learning_rate": 9.75e-06, "loss": 0.4651, "step": 3253 }, { "epoch": 6.003693444136657, "grad_norm": 1.0397669076919556, "learning_rate": 9.753e-06, "loss": 0.309, "step": 3254 }, { "epoch": 
6.0055401662049865, "grad_norm": 0.9423261284828186, "learning_rate": 9.756e-06, "loss": 0.2448, "step": 3255 }, { "epoch": 6.007386888273315, "grad_norm": 1.156479001045227, "learning_rate": 9.759e-06, "loss": 0.2676, "step": 3256 }, { "epoch": 6.009233610341644, "grad_norm": 1.2824240922927856, "learning_rate": 9.762e-06, "loss": 0.2602, "step": 3257 }, { "epoch": 6.011080332409972, "grad_norm": 1.766750693321228, "learning_rate": 9.765e-06, "loss": 0.2751, "step": 3258 }, { "epoch": 6.012927054478301, "grad_norm": 1.0178364515304565, "learning_rate": 9.768e-06, "loss": 0.2036, "step": 3259 }, { "epoch": 6.01477377654663, "grad_norm": 1.1608028411865234, "learning_rate": 9.771e-06, "loss": 0.2316, "step": 3260 }, { "epoch": 6.016620498614959, "grad_norm": 0.907696545124054, "learning_rate": 9.774e-06, "loss": 0.1741, "step": 3261 }, { "epoch": 6.018467220683287, "grad_norm": 0.8990158438682556, "learning_rate": 9.777000000000001e-06, "loss": 0.1648, "step": 3262 }, { "epoch": 6.020313942751616, "grad_norm": 3.445403575897217, "learning_rate": 9.780000000000001e-06, "loss": 0.1243, "step": 3263 }, { "epoch": 6.022160664819944, "grad_norm": 1.2290663719177246, "learning_rate": 9.783000000000001e-06, "loss": 0.1855, "step": 3264 }, { "epoch": 6.0240073868882735, "grad_norm": 1.3456010818481445, "learning_rate": 9.785999999999999e-06, "loss": 0.1791, "step": 3265 }, { "epoch": 6.025854108956602, "grad_norm": 0.5856161713600159, "learning_rate": 9.788999999999999e-06, "loss": 0.07, "step": 3266 }, { "epoch": 6.027700831024931, "grad_norm": 1.00753915309906, "learning_rate": 9.792e-06, "loss": 0.0652, "step": 3267 }, { "epoch": 6.029547553093259, "grad_norm": 2.3274526596069336, "learning_rate": 9.795e-06, "loss": 0.1259, "step": 3268 }, { "epoch": 6.031394275161588, "grad_norm": 0.6562830209732056, "learning_rate": 9.798e-06, "loss": 0.0732, "step": 3269 }, { "epoch": 6.033240997229917, "grad_norm": 0.6230347156524658, "learning_rate": 9.801e-06, "loss": 0.0442, 
"step": 3270 }, { "epoch": 6.035087719298246, "grad_norm": 1.4482897520065308, "learning_rate": 9.804e-06, "loss": 0.0849, "step": 3271 }, { "epoch": 6.036934441366574, "grad_norm": 0.9868390560150146, "learning_rate": 9.807000000000001e-06, "loss": 0.1102, "step": 3272 }, { "epoch": 6.038781163434903, "grad_norm": 0.7521336078643799, "learning_rate": 9.810000000000001e-06, "loss": 0.0647, "step": 3273 }, { "epoch": 6.040627885503231, "grad_norm": 0.8148708343505859, "learning_rate": 9.813e-06, "loss": 0.0647, "step": 3274 }, { "epoch": 6.0424746075715605, "grad_norm": 0.6797977089881897, "learning_rate": 9.816e-06, "loss": 0.0503, "step": 3275 }, { "epoch": 6.044321329639889, "grad_norm": 0.947433352470398, "learning_rate": 9.819e-06, "loss": 0.0674, "step": 3276 }, { "epoch": 6.046168051708218, "grad_norm": 0.7319151163101196, "learning_rate": 9.822e-06, "loss": 0.0542, "step": 3277 }, { "epoch": 6.048014773776547, "grad_norm": 0.9400285482406616, "learning_rate": 9.825e-06, "loss": 0.0562, "step": 3278 }, { "epoch": 6.049861495844875, "grad_norm": 0.8096341490745544, "learning_rate": 9.828e-06, "loss": 0.0659, "step": 3279 }, { "epoch": 6.0517082179132045, "grad_norm": 0.7864410877227783, "learning_rate": 9.831e-06, "loss": 0.0398, "step": 3280 }, { "epoch": 6.053554939981533, "grad_norm": 0.5510549545288086, "learning_rate": 9.834e-06, "loss": 0.032, "step": 3281 }, { "epoch": 6.055401662049862, "grad_norm": 0.7736632823944092, "learning_rate": 9.837000000000001e-06, "loss": 0.0455, "step": 3282 }, { "epoch": 6.05724838411819, "grad_norm": 0.6081624031066895, "learning_rate": 9.84e-06, "loss": 0.0418, "step": 3283 }, { "epoch": 6.059095106186519, "grad_norm": 0.4802582263946533, "learning_rate": 9.843e-06, "loss": 0.0372, "step": 3284 }, { "epoch": 6.0609418282548475, "grad_norm": 0.7252798080444336, "learning_rate": 9.846e-06, "loss": 0.0533, "step": 3285 }, { "epoch": 6.062788550323177, "grad_norm": 1.2600115537643433, "learning_rate": 9.849e-06, "loss": 
0.0556, "step": 3286 }, { "epoch": 6.064635272391505, "grad_norm": 0.8759087324142456, "learning_rate": 9.852000000000002e-06, "loss": 0.0525, "step": 3287 }, { "epoch": 6.066481994459834, "grad_norm": 0.804162323474884, "learning_rate": 9.855000000000001e-06, "loss": 0.0419, "step": 3288 }, { "epoch": 6.068328716528162, "grad_norm": 1.3809750080108643, "learning_rate": 9.858000000000001e-06, "loss": 0.0605, "step": 3289 }, { "epoch": 6.0701754385964914, "grad_norm": 0.7179027199745178, "learning_rate": 9.861e-06, "loss": 0.0287, "step": 3290 }, { "epoch": 6.07202216066482, "grad_norm": 0.9235922694206238, "learning_rate": 9.863999999999999e-06, "loss": 0.0409, "step": 3291 }, { "epoch": 6.073868882733149, "grad_norm": 0.9280220866203308, "learning_rate": 9.867e-06, "loss": 0.0469, "step": 3292 }, { "epoch": 6.075715604801477, "grad_norm": 0.7803265452384949, "learning_rate": 9.87e-06, "loss": 0.0495, "step": 3293 }, { "epoch": 6.077562326869806, "grad_norm": 1.1641448736190796, "learning_rate": 9.873e-06, "loss": 0.0719, "step": 3294 }, { "epoch": 6.0794090489381345, "grad_norm": 1.104889988899231, "learning_rate": 9.876e-06, "loss": 0.0526, "step": 3295 }, { "epoch": 6.081255771006464, "grad_norm": 0.7316168546676636, "learning_rate": 9.879e-06, "loss": 0.0464, "step": 3296 }, { "epoch": 6.083102493074792, "grad_norm": 0.9376199841499329, "learning_rate": 9.882000000000001e-06, "loss": 0.0445, "step": 3297 }, { "epoch": 6.084949215143121, "grad_norm": 1.3941532373428345, "learning_rate": 9.885000000000001e-06, "loss": 0.0469, "step": 3298 }, { "epoch": 6.086795937211449, "grad_norm": 1.170428991317749, "learning_rate": 9.888000000000001e-06, "loss": 0.0497, "step": 3299 }, { "epoch": 6.088642659279778, "grad_norm": 1.5209548473358154, "learning_rate": 9.891e-06, "loss": 0.0568, "step": 3300 }, { "epoch": 6.090489381348107, "grad_norm": 1.155193567276001, "learning_rate": 9.894e-06, "loss": 0.0565, "step": 3301 }, { "epoch": 6.092336103416436, "grad_norm": 
2.590714454650879, "learning_rate": 9.897e-06, "loss": 0.0667, "step": 3302 }, { "epoch": 6.094182825484765, "grad_norm": 3.278620719909668, "learning_rate": 9.9e-06, "loss": 0.3806, "step": 3303 }, { "epoch": 6.096029547553093, "grad_norm": 1.313486933708191, "learning_rate": 9.903e-06, "loss": 0.2242, "step": 3304 }, { "epoch": 6.097876269621422, "grad_norm": 1.0118507146835327, "learning_rate": 9.906e-06, "loss": 0.2646, "step": 3305 }, { "epoch": 6.099722991689751, "grad_norm": 1.1909503936767578, "learning_rate": 9.909e-06, "loss": 0.2251, "step": 3306 }, { "epoch": 6.10156971375808, "grad_norm": 0.9937542080879211, "learning_rate": 9.912000000000001e-06, "loss": 0.3316, "step": 3307 }, { "epoch": 6.103416435826408, "grad_norm": 1.0816807746887207, "learning_rate": 9.915e-06, "loss": 0.2589, "step": 3308 }, { "epoch": 6.105263157894737, "grad_norm": 1.0621750354766846, "learning_rate": 9.918e-06, "loss": 0.1592, "step": 3309 }, { "epoch": 6.107109879963065, "grad_norm": 1.1883070468902588, "learning_rate": 9.921e-06, "loss": 0.1717, "step": 3310 }, { "epoch": 6.108956602031395, "grad_norm": 0.8285554051399231, "learning_rate": 9.924e-06, "loss": 0.1371, "step": 3311 }, { "epoch": 6.110803324099723, "grad_norm": 0.8814343810081482, "learning_rate": 9.927000000000002e-06, "loss": 0.1727, "step": 3312 }, { "epoch": 6.112650046168052, "grad_norm": 1.0420233011245728, "learning_rate": 9.930000000000001e-06, "loss": 0.1265, "step": 3313 }, { "epoch": 6.11449676823638, "grad_norm": 2.53212833404541, "learning_rate": 9.933e-06, "loss": 0.1131, "step": 3314 }, { "epoch": 6.116343490304709, "grad_norm": 1.1509301662445068, "learning_rate": 9.936e-06, "loss": 0.116, "step": 3315 }, { "epoch": 6.118190212373038, "grad_norm": 1.1868430376052856, "learning_rate": 9.939e-06, "loss": 0.1217, "step": 3316 }, { "epoch": 6.120036934441367, "grad_norm": 0.652368426322937, "learning_rate": 9.941999999999999e-06, "loss": 0.0611, "step": 3317 }, { "epoch": 6.121883656509695, 
"grad_norm": 0.6359832882881165, "learning_rate": 9.945e-06, "loss": 0.0494, "step": 3318 }, { "epoch": 6.123730378578024, "grad_norm": 0.8224269151687622, "learning_rate": 9.948e-06, "loss": 0.0672, "step": 3319 }, { "epoch": 6.125577100646352, "grad_norm": 0.6548646092414856, "learning_rate": 9.951e-06, "loss": 0.0513, "step": 3320 }, { "epoch": 6.127423822714682, "grad_norm": 0.6966165900230408, "learning_rate": 9.954e-06, "loss": 0.0395, "step": 3321 }, { "epoch": 6.12927054478301, "grad_norm": 0.4571305513381958, "learning_rate": 9.957e-06, "loss": 0.0374, "step": 3322 }, { "epoch": 6.131117266851339, "grad_norm": 0.6777710318565369, "learning_rate": 9.960000000000001e-06, "loss": 0.0372, "step": 3323 }, { "epoch": 6.132963988919667, "grad_norm": 0.8258435726165771, "learning_rate": 9.963000000000001e-06, "loss": 0.0537, "step": 3324 }, { "epoch": 6.134810710987996, "grad_norm": 0.6122437715530396, "learning_rate": 9.966e-06, "loss": 0.0385, "step": 3325 }, { "epoch": 6.136657433056325, "grad_norm": 0.7694428563117981, "learning_rate": 9.969e-06, "loss": 0.0367, "step": 3326 }, { "epoch": 6.138504155124654, "grad_norm": 0.7247572541236877, "learning_rate": 9.971999999999999e-06, "loss": 0.0426, "step": 3327 }, { "epoch": 6.140350877192983, "grad_norm": 0.7571830153465271, "learning_rate": 9.975e-06, "loss": 0.0513, "step": 3328 }, { "epoch": 6.142197599261311, "grad_norm": 0.9954696893692017, "learning_rate": 9.978e-06, "loss": 0.0392, "step": 3329 }, { "epoch": 6.14404432132964, "grad_norm": 1.1376701593399048, "learning_rate": 9.981e-06, "loss": 0.0635, "step": 3330 }, { "epoch": 6.1458910433979685, "grad_norm": 0.7513870000839233, "learning_rate": 9.984e-06, "loss": 0.0433, "step": 3331 }, { "epoch": 6.147737765466298, "grad_norm": 1.4481124877929688, "learning_rate": 9.987e-06, "loss": 0.0551, "step": 3332 }, { "epoch": 6.149584487534626, "grad_norm": 0.7296556830406189, "learning_rate": 9.990000000000001e-06, "loss": 0.0488, "step": 3333 }, { "epoch": 
6.151431209602955, "grad_norm": 0.6062530279159546, "learning_rate": 9.993e-06, "loss": 0.0348, "step": 3334 }, { "epoch": 6.153277931671283, "grad_norm": 0.5732892751693726, "learning_rate": 9.996e-06, "loss": 0.0416, "step": 3335 }, { "epoch": 6.1551246537396125, "grad_norm": 0.7242116332054138, "learning_rate": 9.999e-06, "loss": 0.0382, "step": 3336 }, { "epoch": 6.156971375807941, "grad_norm": 0.647824764251709, "learning_rate": 1.0002e-05, "loss": 0.0386, "step": 3337 }, { "epoch": 6.15881809787627, "grad_norm": 0.9212784767150879, "learning_rate": 1.0005000000000002e-05, "loss": 0.0536, "step": 3338 }, { "epoch": 6.160664819944598, "grad_norm": 0.6482663750648499, "learning_rate": 1.0008e-05, "loss": 0.0366, "step": 3339 }, { "epoch": 6.162511542012927, "grad_norm": 1.459087610244751, "learning_rate": 1.0011e-05, "loss": 0.0496, "step": 3340 }, { "epoch": 6.1643582640812555, "grad_norm": 0.783748984336853, "learning_rate": 1.0014e-05, "loss": 0.0367, "step": 3341 }, { "epoch": 6.166204986149585, "grad_norm": 0.9266418814659119, "learning_rate": 1.0016999999999999e-05, "loss": 0.0532, "step": 3342 }, { "epoch": 6.168051708217913, "grad_norm": 0.7052007913589478, "learning_rate": 1.002e-05, "loss": 0.0372, "step": 3343 }, { "epoch": 6.169898430286242, "grad_norm": 0.6519185900688171, "learning_rate": 1.0023e-05, "loss": 0.0545, "step": 3344 }, { "epoch": 6.17174515235457, "grad_norm": 1.20285964012146, "learning_rate": 1.0026e-05, "loss": 0.0547, "step": 3345 }, { "epoch": 6.1735918744228995, "grad_norm": 0.860680341720581, "learning_rate": 1.0029e-05, "loss": 0.0724, "step": 3346 }, { "epoch": 6.175438596491228, "grad_norm": 1.4246987104415894, "learning_rate": 1.0032e-05, "loss": 0.0394, "step": 3347 }, { "epoch": 6.177285318559557, "grad_norm": 0.8983826041221619, "learning_rate": 1.0035000000000001e-05, "loss": 0.0629, "step": 3348 }, { "epoch": 6.179132040627885, "grad_norm": 1.397816777229309, "learning_rate": 1.0038000000000001e-05, "loss": 0.1254, 
"step": 3349 }, { "epoch": 6.180978762696214, "grad_norm": 1.3739649057388306, "learning_rate": 1.0041000000000001e-05, "loss": 0.0647, "step": 3350 }, { "epoch": 6.1828254847645425, "grad_norm": 1.1440520286560059, "learning_rate": 1.0043999999999999e-05, "loss": 0.0846, "step": 3351 }, { "epoch": 6.184672206832872, "grad_norm": 0.9402255415916443, "learning_rate": 1.0046999999999999e-05, "loss": 0.0599, "step": 3352 }, { "epoch": 6.186518928901201, "grad_norm": 1.4966752529144287, "learning_rate": 1.005e-05, "loss": 0.3815, "step": 3353 }, { "epoch": 6.188365650969529, "grad_norm": 2.027479410171509, "learning_rate": 1.0053e-05, "loss": 0.4318, "step": 3354 }, { "epoch": 6.190212373037858, "grad_norm": 1.0403293371200562, "learning_rate": 1.0056e-05, "loss": 0.2883, "step": 3355 }, { "epoch": 6.1920590951061865, "grad_norm": 1.2776825428009033, "learning_rate": 1.0059e-05, "loss": 0.2647, "step": 3356 }, { "epoch": 6.193905817174516, "grad_norm": 1.1058181524276733, "learning_rate": 1.0062e-05, "loss": 0.3148, "step": 3357 }, { "epoch": 6.195752539242844, "grad_norm": 1.2859538793563843, "learning_rate": 1.0065000000000001e-05, "loss": 0.2302, "step": 3358 }, { "epoch": 6.197599261311173, "grad_norm": 0.7337931990623474, "learning_rate": 1.0068e-05, "loss": 0.1434, "step": 3359 }, { "epoch": 6.199445983379501, "grad_norm": 0.8204769492149353, "learning_rate": 1.0071e-05, "loss": 0.171, "step": 3360 }, { "epoch": 6.20129270544783, "grad_norm": 1.006606936454773, "learning_rate": 1.0074e-05, "loss": 0.1954, "step": 3361 }, { "epoch": 6.203139427516159, "grad_norm": 1.0071405172348022, "learning_rate": 1.0077e-05, "loss": 0.1727, "step": 3362 }, { "epoch": 6.204986149584488, "grad_norm": 1.1923887729644775, "learning_rate": 1.008e-05, "loss": 0.1355, "step": 3363 }, { "epoch": 6.206832871652816, "grad_norm": 0.8156916499137878, "learning_rate": 1.0083e-05, "loss": 0.1401, "step": 3364 }, { "epoch": 6.208679593721145, "grad_norm": 0.9403882622718811, "learning_rate": 
1.0086e-05, "loss": 0.1544, "step": 3365 }, { "epoch": 6.2105263157894735, "grad_norm": 1.117945909500122, "learning_rate": 1.0089e-05, "loss": 0.1383, "step": 3366 }, { "epoch": 6.212373037857803, "grad_norm": 0.7493522763252258, "learning_rate": 1.0092e-05, "loss": 0.0692, "step": 3367 }, { "epoch": 6.214219759926131, "grad_norm": 0.5226987600326538, "learning_rate": 1.0095e-05, "loss": 0.0472, "step": 3368 }, { "epoch": 6.21606648199446, "grad_norm": 0.7401248812675476, "learning_rate": 1.0098e-05, "loss": 0.1062, "step": 3369 }, { "epoch": 6.217913204062788, "grad_norm": 1.0519952774047852, "learning_rate": 1.0101e-05, "loss": 0.0539, "step": 3370 }, { "epoch": 6.219759926131117, "grad_norm": 0.8793032765388489, "learning_rate": 1.0104e-05, "loss": 0.0369, "step": 3371 }, { "epoch": 6.221606648199446, "grad_norm": 0.48777708411216736, "learning_rate": 1.0107e-05, "loss": 0.0343, "step": 3372 }, { "epoch": 6.223453370267775, "grad_norm": 0.5694409608840942, "learning_rate": 1.0110000000000001e-05, "loss": 0.0572, "step": 3373 }, { "epoch": 6.225300092336103, "grad_norm": 0.7707429528236389, "learning_rate": 1.0113000000000001e-05, "loss": 0.0681, "step": 3374 }, { "epoch": 6.227146814404432, "grad_norm": 0.722528338432312, "learning_rate": 1.0116000000000001e-05, "loss": 0.0453, "step": 3375 }, { "epoch": 6.22899353647276, "grad_norm": 0.834453821182251, "learning_rate": 1.0119e-05, "loss": 0.0526, "step": 3376 }, { "epoch": 6.23084025854109, "grad_norm": 0.9907567501068115, "learning_rate": 1.0121999999999999e-05, "loss": 0.0596, "step": 3377 }, { "epoch": 6.232686980609419, "grad_norm": 0.581995964050293, "learning_rate": 1.0125e-05, "loss": 0.029, "step": 3378 }, { "epoch": 6.234533702677747, "grad_norm": 0.6852633357048035, "learning_rate": 1.0128e-05, "loss": 0.0362, "step": 3379 }, { "epoch": 6.236380424746076, "grad_norm": 0.7753766179084778, "learning_rate": 1.0131e-05, "loss": 0.0437, "step": 3380 }, { "epoch": 6.238227146814404, "grad_norm": 
0.6445023417472839, "learning_rate": 1.0134e-05, "loss": 0.0413, "step": 3381 }, { "epoch": 6.2400738688827335, "grad_norm": 0.8584773540496826, "learning_rate": 1.0137e-05, "loss": 0.0544, "step": 3382 }, { "epoch": 6.241920590951062, "grad_norm": 0.7375549674034119, "learning_rate": 1.0140000000000001e-05, "loss": 0.0642, "step": 3383 }, { "epoch": 6.243767313019391, "grad_norm": 2.494497299194336, "learning_rate": 1.0143000000000001e-05, "loss": 0.0714, "step": 3384 }, { "epoch": 6.245614035087719, "grad_norm": 0.6620352864265442, "learning_rate": 1.0146e-05, "loss": 0.0398, "step": 3385 }, { "epoch": 6.247460757156048, "grad_norm": 0.6307715773582458, "learning_rate": 1.0149e-05, "loss": 0.0546, "step": 3386 }, { "epoch": 6.249307479224377, "grad_norm": 0.8055571913719177, "learning_rate": 1.0152e-05, "loss": 0.0494, "step": 3387 }, { "epoch": 6.251154201292706, "grad_norm": 0.9565600156784058, "learning_rate": 1.0155e-05, "loss": 0.0883, "step": 3388 }, { "epoch": 6.253000923361034, "grad_norm": 0.9024067521095276, "learning_rate": 1.0158e-05, "loss": 0.0558, "step": 3389 }, { "epoch": 6.254847645429363, "grad_norm": 1.121073603630066, "learning_rate": 1.0161e-05, "loss": 0.0572, "step": 3390 }, { "epoch": 6.256694367497691, "grad_norm": 0.8038824796676636, "learning_rate": 1.0164e-05, "loss": 0.0578, "step": 3391 }, { "epoch": 6.2585410895660205, "grad_norm": 0.7897089719772339, "learning_rate": 1.0167e-05, "loss": 0.051, "step": 3392 }, { "epoch": 6.260387811634349, "grad_norm": 0.7924173474311829, "learning_rate": 1.0170000000000001e-05, "loss": 0.0607, "step": 3393 }, { "epoch": 6.262234533702678, "grad_norm": 0.8015258312225342, "learning_rate": 1.0173e-05, "loss": 0.0534, "step": 3394 }, { "epoch": 6.264081255771006, "grad_norm": 0.79560387134552, "learning_rate": 1.0176e-05, "loss": 0.0355, "step": 3395 }, { "epoch": 6.265927977839335, "grad_norm": 0.7222800254821777, "learning_rate": 1.0179e-05, "loss": 0.0546, "step": 3396 }, { "epoch": 
6.267774699907664, "grad_norm": 0.8553308248519897, "learning_rate": 1.0182e-05, "loss": 0.061, "step": 3397 }, { "epoch": 6.269621421975993, "grad_norm": 1.2343560457229614, "learning_rate": 1.0185000000000002e-05, "loss": 0.0505, "step": 3398 }, { "epoch": 6.271468144044321, "grad_norm": 0.755364179611206, "learning_rate": 1.0188000000000001e-05, "loss": 0.0632, "step": 3399 }, { "epoch": 6.27331486611265, "grad_norm": 1.284324288368225, "learning_rate": 1.0191e-05, "loss": 0.0499, "step": 3400 }, { "epoch": 6.275161588180978, "grad_norm": 0.6206008195877075, "learning_rate": 1.0194e-05, "loss": 0.0418, "step": 3401 }, { "epoch": 6.2770083102493075, "grad_norm": 1.1747897863388062, "learning_rate": 1.0196999999999999e-05, "loss": 0.0765, "step": 3402 }, { "epoch": 6.278855032317637, "grad_norm": 1.0085206031799316, "learning_rate": 1.02e-05, "loss": 0.3388, "step": 3403 }, { "epoch": 6.280701754385965, "grad_norm": 1.1853443384170532, "learning_rate": 1.0203e-05, "loss": 0.3431, "step": 3404 }, { "epoch": 6.282548476454294, "grad_norm": 0.9081771969795227, "learning_rate": 1.0206e-05, "loss": 0.2456, "step": 3405 }, { "epoch": 6.284395198522622, "grad_norm": 1.0242339372634888, "learning_rate": 1.0209e-05, "loss": 0.2397, "step": 3406 }, { "epoch": 6.286241920590951, "grad_norm": 0.9105067849159241, "learning_rate": 1.0212e-05, "loss": 0.2331, "step": 3407 }, { "epoch": 6.28808864265928, "grad_norm": 0.6736548542976379, "learning_rate": 1.0215000000000001e-05, "loss": 0.1877, "step": 3408 }, { "epoch": 6.289935364727609, "grad_norm": 0.7968747019767761, "learning_rate": 1.0218000000000001e-05, "loss": 0.1593, "step": 3409 }, { "epoch": 6.291782086795937, "grad_norm": 0.8158178925514221, "learning_rate": 1.0221000000000001e-05, "loss": 0.1985, "step": 3410 }, { "epoch": 6.293628808864266, "grad_norm": 1.3161625862121582, "learning_rate": 1.0224e-05, "loss": 0.2134, "step": 3411 }, { "epoch": 6.2954755309325945, "grad_norm": 0.9767283797264099, "learning_rate": 
1.0227e-05, "loss": 0.2046, "step": 3412 }, { "epoch": 6.297322253000924, "grad_norm": 1.8502895832061768, "learning_rate": 1.023e-05, "loss": 0.1817, "step": 3413 }, { "epoch": 6.299168975069252, "grad_norm": 0.7292230129241943, "learning_rate": 1.0233e-05, "loss": 0.0889, "step": 3414 }, { "epoch": 6.301015697137581, "grad_norm": 0.9305237531661987, "learning_rate": 1.0236e-05, "loss": 0.1109, "step": 3415 }, { "epoch": 6.302862419205909, "grad_norm": 0.6266627311706543, "learning_rate": 1.0239e-05, "loss": 0.0741, "step": 3416 }, { "epoch": 6.304709141274238, "grad_norm": 0.7265087962150574, "learning_rate": 1.0242e-05, "loss": 0.1007, "step": 3417 }, { "epoch": 6.306555863342567, "grad_norm": 0.4341509938240051, "learning_rate": 1.0245000000000001e-05, "loss": 0.0363, "step": 3418 }, { "epoch": 6.308402585410896, "grad_norm": 0.3478453755378723, "learning_rate": 1.0248e-05, "loss": 0.0328, "step": 3419 }, { "epoch": 6.310249307479224, "grad_norm": 0.7002639174461365, "learning_rate": 1.0251e-05, "loss": 0.0809, "step": 3420 }, { "epoch": 6.312096029547553, "grad_norm": 0.5919689536094666, "learning_rate": 1.0254e-05, "loss": 0.0449, "step": 3421 }, { "epoch": 6.3139427516158815, "grad_norm": 0.6110935211181641, "learning_rate": 1.0257e-05, "loss": 0.0483, "step": 3422 }, { "epoch": 6.315789473684211, "grad_norm": 0.699510395526886, "learning_rate": 1.0260000000000002e-05, "loss": 0.0377, "step": 3423 }, { "epoch": 6.317636195752539, "grad_norm": 0.6981428861618042, "learning_rate": 1.0263000000000002e-05, "loss": 0.048, "step": 3424 }, { "epoch": 6.319482917820868, "grad_norm": 1.3745615482330322, "learning_rate": 1.0266e-05, "loss": 0.0736, "step": 3425 }, { "epoch": 6.321329639889196, "grad_norm": 0.9349688291549683, "learning_rate": 1.0269e-05, "loss": 0.0494, "step": 3426 }, { "epoch": 6.323176361957525, "grad_norm": 1.4682650566101074, "learning_rate": 1.0272e-05, "loss": 0.078, "step": 3427 }, { "epoch": 6.325023084025855, "grad_norm": 0.8241517543792725, 
"learning_rate": 1.0275e-05, "loss": 0.0775, "step": 3428 }, { "epoch": 6.326869806094183, "grad_norm": 1.6067695617675781, "learning_rate": 1.0278e-05, "loss": 0.1381, "step": 3429 }, { "epoch": 6.328716528162512, "grad_norm": 0.7926170229911804, "learning_rate": 1.0281e-05, "loss": 0.0527, "step": 3430 }, { "epoch": 6.33056325023084, "grad_norm": 1.9036262035369873, "learning_rate": 1.0284e-05, "loss": 0.0394, "step": 3431 }, { "epoch": 6.332409972299169, "grad_norm": 0.6403128504753113, "learning_rate": 1.0287e-05, "loss": 0.0793, "step": 3432 }, { "epoch": 6.334256694367498, "grad_norm": 0.6075448989868164, "learning_rate": 1.0290000000000001e-05, "loss": 0.049, "step": 3433 }, { "epoch": 6.336103416435827, "grad_norm": 0.9043646454811096, "learning_rate": 1.0293000000000001e-05, "loss": 0.0559, "step": 3434 }, { "epoch": 6.337950138504155, "grad_norm": 0.9564310908317566, "learning_rate": 1.0296000000000001e-05, "loss": 0.0372, "step": 3435 }, { "epoch": 6.339796860572484, "grad_norm": 0.5282099843025208, "learning_rate": 1.0299e-05, "loss": 0.0328, "step": 3436 }, { "epoch": 6.341643582640812, "grad_norm": 1.1436553001403809, "learning_rate": 1.0301999999999999e-05, "loss": 0.0548, "step": 3437 }, { "epoch": 6.3434903047091415, "grad_norm": 0.8616344928741455, "learning_rate": 1.0305e-05, "loss": 0.0488, "step": 3438 }, { "epoch": 6.34533702677747, "grad_norm": 1.3523774147033691, "learning_rate": 1.0308e-05, "loss": 0.0562, "step": 3439 }, { "epoch": 6.347183748845799, "grad_norm": 0.6882216334342957, "learning_rate": 1.0311e-05, "loss": 0.0426, "step": 3440 }, { "epoch": 6.349030470914127, "grad_norm": 1.0950908660888672, "learning_rate": 1.0314e-05, "loss": 0.0582, "step": 3441 }, { "epoch": 6.350877192982456, "grad_norm": 1.109289526939392, "learning_rate": 1.0317e-05, "loss": 0.0602, "step": 3442 }, { "epoch": 6.352723915050785, "grad_norm": 0.8034445643424988, "learning_rate": 1.032e-05, "loss": 0.0466, "step": 3443 }, { "epoch": 6.354570637119114, 
"grad_norm": 0.7607554197311401, "learning_rate": 1.0323000000000001e-05, "loss": 0.0351, "step": 3444 }, { "epoch": 6.356417359187442, "grad_norm": 0.9530297517776489, "learning_rate": 1.0326e-05, "loss": 0.0422, "step": 3445 }, { "epoch": 6.358264081255771, "grad_norm": 1.066715955734253, "learning_rate": 1.0329e-05, "loss": 0.0728, "step": 3446 }, { "epoch": 6.360110803324099, "grad_norm": 0.8962160348892212, "learning_rate": 1.0332e-05, "loss": 0.0558, "step": 3447 }, { "epoch": 6.3619575253924285, "grad_norm": 0.6709640026092529, "learning_rate": 1.0335e-05, "loss": 0.0362, "step": 3448 }, { "epoch": 6.363804247460757, "grad_norm": 1.028660774230957, "learning_rate": 1.0338e-05, "loss": 0.0479, "step": 3449 }, { "epoch": 6.365650969529086, "grad_norm": 0.7419813275337219, "learning_rate": 1.0341e-05, "loss": 0.0515, "step": 3450 }, { "epoch": 6.367497691597414, "grad_norm": 1.3043344020843506, "learning_rate": 1.0344e-05, "loss": 0.0608, "step": 3451 }, { "epoch": 6.369344413665743, "grad_norm": 0.9880107641220093, "learning_rate": 1.0347e-05, "loss": 0.0471, "step": 3452 }, { "epoch": 6.3711911357340725, "grad_norm": 1.2809970378875732, "learning_rate": 1.035e-05, "loss": 0.3272, "step": 3453 }, { "epoch": 6.373037857802401, "grad_norm": 1.0759568214416504, "learning_rate": 1.0353e-05, "loss": 0.3555, "step": 3454 }, { "epoch": 6.374884579870729, "grad_norm": 1.1627869606018066, "learning_rate": 1.0356e-05, "loss": 0.3155, "step": 3455 }, { "epoch": 6.376731301939058, "grad_norm": 1.6699895858764648, "learning_rate": 1.0359e-05, "loss": 0.2746, "step": 3456 }, { "epoch": 6.378578024007387, "grad_norm": 0.8494102358818054, "learning_rate": 1.0362e-05, "loss": 0.2381, "step": 3457 }, { "epoch": 6.3804247460757155, "grad_norm": 0.9873664975166321, "learning_rate": 1.0365e-05, "loss": 0.2455, "step": 3458 }, { "epoch": 6.382271468144045, "grad_norm": 1.3834065198898315, "learning_rate": 1.0368000000000001e-05, "loss": 0.2281, "step": 3459 }, { "epoch": 
6.384118190212373, "grad_norm": 0.9461009502410889, "learning_rate": 1.0371000000000001e-05, "loss": 0.1776, "step": 3460 }, { "epoch": 6.385964912280702, "grad_norm": 1.184063196182251, "learning_rate": 1.0374000000000001e-05, "loss": 0.2644, "step": 3461 }, { "epoch": 6.38781163434903, "grad_norm": 1.0690431594848633, "learning_rate": 1.0376999999999999e-05, "loss": 0.2075, "step": 3462 }, { "epoch": 6.3896583564173595, "grad_norm": 0.8396578431129456, "learning_rate": 1.0379999999999999e-05, "loss": 0.1208, "step": 3463 }, { "epoch": 6.391505078485688, "grad_norm": 1.136763095855713, "learning_rate": 1.0383e-05, "loss": 0.188, "step": 3464 }, { "epoch": 6.393351800554017, "grad_norm": 0.6293352246284485, "learning_rate": 1.0386e-05, "loss": 0.0897, "step": 3465 }, { "epoch": 6.395198522622345, "grad_norm": 0.8428406715393066, "learning_rate": 1.0389e-05, "loss": 0.0854, "step": 3466 }, { "epoch": 6.397045244690674, "grad_norm": 1.0550479888916016, "learning_rate": 1.0392e-05, "loss": 0.1188, "step": 3467 }, { "epoch": 6.3988919667590025, "grad_norm": 0.7489149570465088, "learning_rate": 1.0395e-05, "loss": 0.0845, "step": 3468 }, { "epoch": 6.400738688827332, "grad_norm": 0.5197301506996155, "learning_rate": 1.0398000000000001e-05, "loss": 0.0488, "step": 3469 }, { "epoch": 6.40258541089566, "grad_norm": 1.0635749101638794, "learning_rate": 1.0401000000000001e-05, "loss": 0.0613, "step": 3470 }, { "epoch": 6.404432132963989, "grad_norm": 0.8245949149131775, "learning_rate": 1.0404e-05, "loss": 0.0741, "step": 3471 }, { "epoch": 6.406278855032317, "grad_norm": 1.0676509141921997, "learning_rate": 1.0407e-05, "loss": 0.1082, "step": 3472 }, { "epoch": 6.4081255771006465, "grad_norm": 1.1991374492645264, "learning_rate": 1.041e-05, "loss": 0.1308, "step": 3473 }, { "epoch": 6.409972299168975, "grad_norm": 0.5810908675193787, "learning_rate": 1.0413e-05, "loss": 0.0451, "step": 3474 }, { "epoch": 6.411819021237304, "grad_norm": 0.5625260472297668, "learning_rate": 
1.0416e-05, "loss": 0.0317, "step": 3475 }, { "epoch": 6.413665743305632, "grad_norm": 0.7243143916130066, "learning_rate": 1.0419e-05, "loss": 0.0504, "step": 3476 }, { "epoch": 6.415512465373961, "grad_norm": 0.5748608708381653, "learning_rate": 1.0422e-05, "loss": 0.0411, "step": 3477 }, { "epoch": 6.41735918744229, "grad_norm": 0.8418190479278564, "learning_rate": 1.0425e-05, "loss": 0.0504, "step": 3478 }, { "epoch": 6.419205909510619, "grad_norm": 0.5589009523391724, "learning_rate": 1.0428e-05, "loss": 0.0473, "step": 3479 }, { "epoch": 6.421052631578947, "grad_norm": 0.3747293949127197, "learning_rate": 1.0431e-05, "loss": 0.0211, "step": 3480 }, { "epoch": 6.422899353647276, "grad_norm": 0.9748400449752808, "learning_rate": 1.0434e-05, "loss": 0.0567, "step": 3481 }, { "epoch": 6.424746075715605, "grad_norm": 0.5059589743614197, "learning_rate": 1.0437e-05, "loss": 0.0307, "step": 3482 }, { "epoch": 6.426592797783933, "grad_norm": 0.6502093076705933, "learning_rate": 1.044e-05, "loss": 0.0454, "step": 3483 }, { "epoch": 6.428439519852263, "grad_norm": 0.994144082069397, "learning_rate": 1.0443000000000001e-05, "loss": 0.0495, "step": 3484 }, { "epoch": 6.430286241920591, "grad_norm": 0.9719415903091431, "learning_rate": 1.0446000000000001e-05, "loss": 0.0471, "step": 3485 }, { "epoch": 6.43213296398892, "grad_norm": 0.5761688351631165, "learning_rate": 1.0449e-05, "loss": 0.0358, "step": 3486 }, { "epoch": 6.433979686057248, "grad_norm": 0.8566411137580872, "learning_rate": 1.0452e-05, "loss": 0.0368, "step": 3487 }, { "epoch": 6.435826408125577, "grad_norm": 1.0438909530639648, "learning_rate": 1.0454999999999999e-05, "loss": 0.0525, "step": 3488 }, { "epoch": 6.437673130193906, "grad_norm": 0.691596508026123, "learning_rate": 1.0458e-05, "loss": 0.0307, "step": 3489 }, { "epoch": 6.439519852262235, "grad_norm": 1.0400809049606323, "learning_rate": 1.0461e-05, "loss": 0.0433, "step": 3490 }, { "epoch": 6.441366574330563, "grad_norm": 1.1551169157028198, 
"learning_rate": 1.0464e-05, "loss": 0.0412, "step": 3491 }, { "epoch": 6.443213296398892, "grad_norm": 1.3905787467956543, "learning_rate": 1.0467e-05, "loss": 0.0476, "step": 3492 }, { "epoch": 6.44506001846722, "grad_norm": 0.8146647810935974, "learning_rate": 1.047e-05, "loss": 0.0689, "step": 3493 }, { "epoch": 6.44690674053555, "grad_norm": 0.9274573922157288, "learning_rate": 1.0473000000000001e-05, "loss": 0.0461, "step": 3494 }, { "epoch": 6.448753462603878, "grad_norm": 0.9964638352394104, "learning_rate": 1.0476000000000001e-05, "loss": 0.0625, "step": 3495 }, { "epoch": 6.450600184672207, "grad_norm": 0.6893898248672485, "learning_rate": 1.0479e-05, "loss": 0.0425, "step": 3496 }, { "epoch": 6.452446906740535, "grad_norm": 0.627974808216095, "learning_rate": 1.0482e-05, "loss": 0.0476, "step": 3497 }, { "epoch": 6.454293628808864, "grad_norm": 0.8051822185516357, "learning_rate": 1.0485e-05, "loss": 0.0471, "step": 3498 }, { "epoch": 6.456140350877193, "grad_norm": 0.8309305906295776, "learning_rate": 1.0488e-05, "loss": 0.0481, "step": 3499 }, { "epoch": 6.457987072945522, "grad_norm": 0.6573142409324646, "learning_rate": 1.0491e-05, "loss": 0.0397, "step": 3500 }, { "epoch": 6.45983379501385, "grad_norm": 2.428025960922241, "learning_rate": 1.0494e-05, "loss": 0.061, "step": 3501 }, { "epoch": 6.461680517082179, "grad_norm": 1.2131538391113281, "learning_rate": 1.0497e-05, "loss": 0.0639, "step": 3502 }, { "epoch": 6.463527239150508, "grad_norm": 1.5500845909118652, "learning_rate": 1.05e-05, "loss": 0.3862, "step": 3503 }, { "epoch": 6.465373961218837, "grad_norm": 1.091170072555542, "learning_rate": 1.0503000000000001e-05, "loss": 0.3148, "step": 3504 }, { "epoch": 6.467220683287165, "grad_norm": 1.0645906925201416, "learning_rate": 1.0506e-05, "loss": 0.3232, "step": 3505 }, { "epoch": 6.469067405355494, "grad_norm": 1.3559659719467163, "learning_rate": 1.0509e-05, "loss": 0.2834, "step": 3506 }, { "epoch": 6.470914127423823, "grad_norm": 
1.1737184524536133, "learning_rate": 1.0512e-05, "loss": 0.2639, "step": 3507 }, { "epoch": 6.472760849492151, "grad_norm": 1.5199416875839233, "learning_rate": 1.0515e-05, "loss": 0.2529, "step": 3508 }, { "epoch": 6.4746075715604805, "grad_norm": 0.9331461191177368, "learning_rate": 1.0518000000000002e-05, "loss": 0.2125, "step": 3509 }, { "epoch": 6.476454293628809, "grad_norm": 0.8014020919799805, "learning_rate": 1.0521000000000001e-05, "loss": 0.2295, "step": 3510 }, { "epoch": 6.478301015697138, "grad_norm": 1.2764960527420044, "learning_rate": 1.0524e-05, "loss": 0.2104, "step": 3511 }, { "epoch": 6.480147737765466, "grad_norm": 0.8344443440437317, "learning_rate": 1.0527e-05, "loss": 0.1395, "step": 3512 }, { "epoch": 6.481994459833795, "grad_norm": 1.5831360816955566, "learning_rate": 1.0529999999999999e-05, "loss": 0.1398, "step": 3513 }, { "epoch": 6.4838411819021236, "grad_norm": 0.9806162118911743, "learning_rate": 1.0533e-05, "loss": 0.1731, "step": 3514 }, { "epoch": 6.485687903970453, "grad_norm": 0.7452002763748169, "learning_rate": 1.0536e-05, "loss": 0.1019, "step": 3515 }, { "epoch": 6.487534626038781, "grad_norm": 0.7927934527397156, "learning_rate": 1.0539e-05, "loss": 0.0808, "step": 3516 }, { "epoch": 6.48938134810711, "grad_norm": 1.0615880489349365, "learning_rate": 1.0542e-05, "loss": 0.1983, "step": 3517 }, { "epoch": 6.491228070175438, "grad_norm": 0.9302128553390503, "learning_rate": 1.0545e-05, "loss": 0.1074, "step": 3518 }, { "epoch": 6.4930747922437675, "grad_norm": 0.8489500284194946, "learning_rate": 1.0548000000000001e-05, "loss": 0.0644, "step": 3519 }, { "epoch": 6.494921514312096, "grad_norm": 0.49855339527130127, "learning_rate": 1.0551000000000001e-05, "loss": 0.0435, "step": 3520 }, { "epoch": 6.496768236380425, "grad_norm": 0.7827534675598145, "learning_rate": 1.0554000000000001e-05, "loss": 0.0548, "step": 3521 }, { "epoch": 6.498614958448753, "grad_norm": 2.3017821311950684, "learning_rate": 1.0557e-05, "loss": 0.0593, 
"step": 3522 }, { "epoch": 6.500461680517082, "grad_norm": 0.5665956139564514, "learning_rate": 1.0559999999999999e-05, "loss": 0.0539, "step": 3523 }, { "epoch": 6.5023084025854105, "grad_norm": 0.7230385541915894, "learning_rate": 1.0563e-05, "loss": 0.0515, "step": 3524 }, { "epoch": 6.50415512465374, "grad_norm": 0.799625039100647, "learning_rate": 1.0566e-05, "loss": 0.0512, "step": 3525 }, { "epoch": 6.506001846722068, "grad_norm": 0.7463293075561523, "learning_rate": 1.0569e-05, "loss": 0.0479, "step": 3526 }, { "epoch": 6.507848568790397, "grad_norm": 0.9893417358398438, "learning_rate": 1.0572e-05, "loss": 0.0445, "step": 3527 }, { "epoch": 6.509695290858726, "grad_norm": 0.7986618876457214, "learning_rate": 1.0575e-05, "loss": 0.0599, "step": 3528 }, { "epoch": 6.5115420129270545, "grad_norm": 0.6171774864196777, "learning_rate": 1.0578000000000001e-05, "loss": 0.0416, "step": 3529 }, { "epoch": 6.513388734995383, "grad_norm": 0.6155923008918762, "learning_rate": 1.0581e-05, "loss": 0.0321, "step": 3530 }, { "epoch": 6.515235457063712, "grad_norm": 0.8731139302253723, "learning_rate": 1.0584e-05, "loss": 0.048, "step": 3531 }, { "epoch": 6.517082179132041, "grad_norm": 1.004489541053772, "learning_rate": 1.0587e-05, "loss": 0.0373, "step": 3532 }, { "epoch": 6.518928901200369, "grad_norm": 0.6643251180648804, "learning_rate": 1.059e-05, "loss": 0.0398, "step": 3533 }, { "epoch": 6.520775623268698, "grad_norm": 1.0458920001983643, "learning_rate": 1.0593000000000002e-05, "loss": 0.0634, "step": 3534 }, { "epoch": 6.522622345337027, "grad_norm": 0.7619039416313171, "learning_rate": 1.0596e-05, "loss": 0.0452, "step": 3535 }, { "epoch": 6.524469067405356, "grad_norm": 0.49429938197135925, "learning_rate": 1.0599e-05, "loss": 0.0252, "step": 3536 }, { "epoch": 6.526315789473684, "grad_norm": 0.9544243216514587, "learning_rate": 1.0602e-05, "loss": 0.0532, "step": 3537 }, { "epoch": 6.528162511542013, "grad_norm": 0.6411821246147156, "learning_rate": 
1.0605e-05, "loss": 0.0463, "step": 3538 }, { "epoch": 6.5300092336103415, "grad_norm": 0.9624695181846619, "learning_rate": 1.0608e-05, "loss": 0.0628, "step": 3539 }, { "epoch": 6.531855955678671, "grad_norm": 0.7749090194702148, "learning_rate": 1.0611e-05, "loss": 0.0786, "step": 3540 }, { "epoch": 6.533702677746999, "grad_norm": 0.8554633259773254, "learning_rate": 1.0614e-05, "loss": 0.0492, "step": 3541 }, { "epoch": 6.535549399815328, "grad_norm": 0.9086441397666931, "learning_rate": 1.0617e-05, "loss": 0.0641, "step": 3542 }, { "epoch": 6.537396121883656, "grad_norm": 1.2479761838912964, "learning_rate": 1.062e-05, "loss": 0.0503, "step": 3543 }, { "epoch": 6.539242843951985, "grad_norm": 0.9384217858314514, "learning_rate": 1.0623000000000001e-05, "loss": 0.0434, "step": 3544 }, { "epoch": 6.541089566020314, "grad_norm": 0.9494989514350891, "learning_rate": 1.0626000000000001e-05, "loss": 0.0533, "step": 3545 }, { "epoch": 6.542936288088643, "grad_norm": 0.9067333340644836, "learning_rate": 1.0629000000000001e-05, "loss": 0.0382, "step": 3546 }, { "epoch": 6.544783010156971, "grad_norm": 0.49929141998291016, "learning_rate": 1.0632000000000001e-05, "loss": 0.0316, "step": 3547 }, { "epoch": 6.5466297322253, "grad_norm": 0.9056301712989807, "learning_rate": 1.0634999999999999e-05, "loss": 0.0556, "step": 3548 }, { "epoch": 6.5484764542936285, "grad_norm": 0.7503829598426819, "learning_rate": 1.0638e-05, "loss": 0.0376, "step": 3549 }, { "epoch": 6.550323176361958, "grad_norm": 0.8950915336608887, "learning_rate": 1.0641e-05, "loss": 0.0437, "step": 3550 }, { "epoch": 6.552169898430286, "grad_norm": 0.932851254940033, "learning_rate": 1.0644e-05, "loss": 0.0763, "step": 3551 }, { "epoch": 6.554016620498615, "grad_norm": 0.9872285723686218, "learning_rate": 1.0647e-05, "loss": 0.0564, "step": 3552 }, { "epoch": 6.555863342566944, "grad_norm": 1.4405521154403687, "learning_rate": 1.065e-05, "loss": 0.3025, "step": 3553 }, { "epoch": 6.557710064635272, 
"grad_norm": 1.3220841884613037, "learning_rate": 1.0653000000000001e-05, "loss": 0.2998, "step": 3554 }, { "epoch": 6.559556786703601, "grad_norm": 1.004377007484436, "learning_rate": 1.0656000000000001e-05, "loss": 0.275, "step": 3555 }, { "epoch": 6.56140350877193, "grad_norm": 1.3007129430770874, "learning_rate": 1.0659e-05, "loss": 0.2442, "step": 3556 }, { "epoch": 6.563250230840259, "grad_norm": 0.8836926221847534, "learning_rate": 1.0662e-05, "loss": 0.2277, "step": 3557 }, { "epoch": 6.565096952908587, "grad_norm": 1.2006216049194336, "learning_rate": 1.0665e-05, "loss": 0.246, "step": 3558 }, { "epoch": 6.566943674976916, "grad_norm": 0.9763594269752502, "learning_rate": 1.0668000000000002e-05, "loss": 0.1581, "step": 3559 }, { "epoch": 6.568790397045245, "grad_norm": 0.7451435327529907, "learning_rate": 1.0671e-05, "loss": 0.178, "step": 3560 }, { "epoch": 6.570637119113574, "grad_norm": 0.720901370048523, "learning_rate": 1.0674e-05, "loss": 0.1629, "step": 3561 }, { "epoch": 6.572483841181902, "grad_norm": 0.8330392241477966, "learning_rate": 1.0677e-05, "loss": 0.1589, "step": 3562 }, { "epoch": 6.574330563250231, "grad_norm": 0.9879810810089111, "learning_rate": 1.068e-05, "loss": 0.1345, "step": 3563 }, { "epoch": 6.576177285318559, "grad_norm": 0.8167811632156372, "learning_rate": 1.0683000000000001e-05, "loss": 0.1357, "step": 3564 }, { "epoch": 6.5780240073868885, "grad_norm": 1.456222414970398, "learning_rate": 1.0686e-05, "loss": 0.1959, "step": 3565 }, { "epoch": 6.579870729455217, "grad_norm": 0.9722453951835632, "learning_rate": 1.0689e-05, "loss": 0.104, "step": 3566 }, { "epoch": 6.581717451523546, "grad_norm": 0.7950766086578369, "learning_rate": 1.0692e-05, "loss": 0.1026, "step": 3567 }, { "epoch": 6.583564173591874, "grad_norm": 0.7319908142089844, "learning_rate": 1.0695e-05, "loss": 0.0835, "step": 3568 }, { "epoch": 6.585410895660203, "grad_norm": 0.4791616201400757, "learning_rate": 1.0698e-05, "loss": 0.0517, "step": 3569 }, { 
"epoch": 6.587257617728532, "grad_norm": 0.8066070675849915, "learning_rate": 1.0701000000000001e-05, "loss": 0.0591, "step": 3570 }, { "epoch": 6.589104339796861, "grad_norm": 0.5366147756576538, "learning_rate": 1.0704000000000001e-05, "loss": 0.0609, "step": 3571 }, { "epoch": 6.590951061865189, "grad_norm": 0.5729362368583679, "learning_rate": 1.0707e-05, "loss": 0.067, "step": 3572 }, { "epoch": 6.592797783933518, "grad_norm": 0.8931047320365906, "learning_rate": 1.0709999999999999e-05, "loss": 0.0406, "step": 3573 }, { "epoch": 6.594644506001846, "grad_norm": 0.8464675545692444, "learning_rate": 1.0712999999999999e-05, "loss": 0.06, "step": 3574 }, { "epoch": 6.5964912280701755, "grad_norm": 0.8957421779632568, "learning_rate": 1.0716e-05, "loss": 0.0611, "step": 3575 }, { "epoch": 6.598337950138504, "grad_norm": 0.6566211581230164, "learning_rate": 1.0719e-05, "loss": 0.0469, "step": 3576 }, { "epoch": 6.600184672206833, "grad_norm": 0.6337366104125977, "learning_rate": 1.0722e-05, "loss": 0.0475, "step": 3577 }, { "epoch": 6.602031394275162, "grad_norm": 0.6649699807167053, "learning_rate": 1.0725e-05, "loss": 0.0504, "step": 3578 }, { "epoch": 6.60387811634349, "grad_norm": 0.8092751502990723, "learning_rate": 1.0728e-05, "loss": 0.0717, "step": 3579 }, { "epoch": 6.605724838411819, "grad_norm": 0.6652283668518066, "learning_rate": 1.0731000000000001e-05, "loss": 0.0618, "step": 3580 }, { "epoch": 6.607571560480148, "grad_norm": 1.0060979127883911, "learning_rate": 1.0734000000000001e-05, "loss": 0.0732, "step": 3581 }, { "epoch": 6.609418282548477, "grad_norm": 0.5705205798149109, "learning_rate": 1.0737e-05, "loss": 0.0522, "step": 3582 }, { "epoch": 6.611265004616805, "grad_norm": 0.7541183233261108, "learning_rate": 1.074e-05, "loss": 0.0398, "step": 3583 }, { "epoch": 6.613111726685134, "grad_norm": 0.9432827830314636, "learning_rate": 1.0743e-05, "loss": 0.0441, "step": 3584 }, { "epoch": 6.6149584487534625, "grad_norm": 0.8581227660179138, 
"learning_rate": 1.0746e-05, "loss": 0.0589, "step": 3585 }, { "epoch": 6.616805170821792, "grad_norm": 0.9152125716209412, "learning_rate": 1.0749e-05, "loss": 0.0627, "step": 3586 }, { "epoch": 6.61865189289012, "grad_norm": 0.9982067346572876, "learning_rate": 1.0752e-05, "loss": 0.0527, "step": 3587 }, { "epoch": 6.620498614958449, "grad_norm": 0.617465615272522, "learning_rate": 1.0755e-05, "loss": 0.0346, "step": 3588 }, { "epoch": 6.622345337026777, "grad_norm": 0.9789791703224182, "learning_rate": 1.0758e-05, "loss": 0.0632, "step": 3589 }, { "epoch": 6.624192059095106, "grad_norm": 0.8940677046775818, "learning_rate": 1.0761e-05, "loss": 0.0495, "step": 3590 }, { "epoch": 6.626038781163435, "grad_norm": 1.0663244724273682, "learning_rate": 1.0764e-05, "loss": 0.0678, "step": 3591 }, { "epoch": 6.627885503231764, "grad_norm": 0.8719698190689087, "learning_rate": 1.0767e-05, "loss": 0.0659, "step": 3592 }, { "epoch": 6.629732225300092, "grad_norm": 0.6242677569389343, "learning_rate": 1.077e-05, "loss": 0.0514, "step": 3593 }, { "epoch": 6.631578947368421, "grad_norm": 0.9718525409698486, "learning_rate": 1.0773e-05, "loss": 0.0539, "step": 3594 }, { "epoch": 6.6334256694367495, "grad_norm": 1.2668012380599976, "learning_rate": 1.0776000000000002e-05, "loss": 0.0596, "step": 3595 }, { "epoch": 6.635272391505079, "grad_norm": 0.9224687814712524, "learning_rate": 1.0779000000000001e-05, "loss": 0.0681, "step": 3596 }, { "epoch": 6.637119113573407, "grad_norm": 0.7575953602790833, "learning_rate": 1.0782e-05, "loss": 0.0416, "step": 3597 }, { "epoch": 6.638965835641736, "grad_norm": 0.6865046620368958, "learning_rate": 1.0785e-05, "loss": 0.0355, "step": 3598 }, { "epoch": 6.640812557710064, "grad_norm": 1.1411089897155762, "learning_rate": 1.0787999999999999e-05, "loss": 0.0513, "step": 3599 }, { "epoch": 6.642659279778393, "grad_norm": 0.8646680116653442, "learning_rate": 1.0791e-05, "loss": 0.0561, "step": 3600 }, { "epoch": 6.644506001846722, "grad_norm": 
2.013439893722534, "learning_rate": 1.0794e-05, "loss": 0.0872, "step": 3601 }, { "epoch": 6.646352723915051, "grad_norm": 1.046962857246399, "learning_rate": 1.0797e-05, "loss": 0.0669, "step": 3602 }, { "epoch": 6.64819944598338, "grad_norm": 2.58697772026062, "learning_rate": 1.08e-05, "loss": 0.4191, "step": 3603 }, { "epoch": 6.650046168051708, "grad_norm": 1.5602034330368042, "learning_rate": 1.0803e-05, "loss": 0.3947, "step": 3604 }, { "epoch": 6.6518928901200365, "grad_norm": 1.0020840167999268, "learning_rate": 1.0806000000000001e-05, "loss": 0.32, "step": 3605 }, { "epoch": 6.653739612188366, "grad_norm": 1.0240564346313477, "learning_rate": 1.0809000000000001e-05, "loss": 0.2205, "step": 3606 }, { "epoch": 6.655586334256695, "grad_norm": 1.3797069787979126, "learning_rate": 1.0812e-05, "loss": 0.2478, "step": 3607 }, { "epoch": 6.657433056325023, "grad_norm": 1.4267139434814453, "learning_rate": 1.0815e-05, "loss": 0.2508, "step": 3608 }, { "epoch": 6.659279778393352, "grad_norm": 1.2222124338150024, "learning_rate": 1.0817999999999999e-05, "loss": 0.2054, "step": 3609 }, { "epoch": 6.66112650046168, "grad_norm": 1.127008318901062, "learning_rate": 1.0821e-05, "loss": 0.1975, "step": 3610 }, { "epoch": 6.66297322253001, "grad_norm": 0.9890972375869751, "learning_rate": 1.0824e-05, "loss": 0.1881, "step": 3611 }, { "epoch": 6.664819944598338, "grad_norm": 1.3429679870605469, "learning_rate": 1.0827e-05, "loss": 0.2332, "step": 3612 }, { "epoch": 6.666666666666667, "grad_norm": 1.3030575513839722, "learning_rate": 1.083e-05, "loss": 0.1254, "step": 3613 }, { "epoch": 6.668513388734995, "grad_norm": 2.01155948638916, "learning_rate": 1.0833e-05, "loss": 0.1191, "step": 3614 }, { "epoch": 6.670360110803324, "grad_norm": 0.8010526299476624, "learning_rate": 1.0836000000000001e-05, "loss": 0.122, "step": 3615 }, { "epoch": 6.672206832871653, "grad_norm": 1.210434913635254, "learning_rate": 1.0839e-05, "loss": 0.0684, "step": 3616 }, { "epoch": 
6.674053554939982, "grad_norm": 0.9653677344322205, "learning_rate": 1.0842e-05, "loss": 0.0914, "step": 3617 }, { "epoch": 6.67590027700831, "grad_norm": 2.1854913234710693, "learning_rate": 1.0845e-05, "loss": 0.1486, "step": 3618 }, { "epoch": 6.677746999076639, "grad_norm": 0.8655308485031128, "learning_rate": 1.0848e-05, "loss": 0.0763, "step": 3619 }, { "epoch": 6.679593721144967, "grad_norm": 0.42856207489967346, "learning_rate": 1.0851000000000002e-05, "loss": 0.0371, "step": 3620 }, { "epoch": 6.6814404432132966, "grad_norm": 0.8060868978500366, "learning_rate": 1.0854e-05, "loss": 0.0453, "step": 3621 }, { "epoch": 6.683287165281625, "grad_norm": 1.4295423030853271, "learning_rate": 1.0857e-05, "loss": 0.0821, "step": 3622 }, { "epoch": 6.685133887349954, "grad_norm": 0.5448370575904846, "learning_rate": 1.086e-05, "loss": 0.0353, "step": 3623 }, { "epoch": 6.686980609418282, "grad_norm": 0.7300196886062622, "learning_rate": 1.0863e-05, "loss": 0.0528, "step": 3624 }, { "epoch": 6.688827331486611, "grad_norm": 0.6349700093269348, "learning_rate": 1.0866e-05, "loss": 0.0526, "step": 3625 }, { "epoch": 6.69067405355494, "grad_norm": 0.401645690202713, "learning_rate": 1.0869e-05, "loss": 0.023, "step": 3626 }, { "epoch": 6.692520775623269, "grad_norm": 0.5231764912605286, "learning_rate": 1.0872e-05, "loss": 0.0398, "step": 3627 }, { "epoch": 6.694367497691598, "grad_norm": 0.9089896082878113, "learning_rate": 1.0875e-05, "loss": 0.0471, "step": 3628 }, { "epoch": 6.696214219759926, "grad_norm": 0.9645804166793823, "learning_rate": 1.0878e-05, "loss": 0.0676, "step": 3629 }, { "epoch": 6.698060941828254, "grad_norm": 0.7476101517677307, "learning_rate": 1.0881000000000001e-05, "loss": 0.0526, "step": 3630 }, { "epoch": 6.6999076638965835, "grad_norm": 0.7427716255187988, "learning_rate": 1.0884000000000001e-05, "loss": 0.0327, "step": 3631 }, { "epoch": 6.701754385964913, "grad_norm": 0.6101641058921814, "learning_rate": 1.0887000000000001e-05, "loss": 
0.0413, "step": 3632 }, { "epoch": 6.703601108033241, "grad_norm": 0.7341259717941284, "learning_rate": 1.089e-05, "loss": 0.0663, "step": 3633 }, { "epoch": 6.70544783010157, "grad_norm": 0.7948614954948425, "learning_rate": 1.0892999999999999e-05, "loss": 0.0883, "step": 3634 }, { "epoch": 6.707294552169898, "grad_norm": 0.7815687656402588, "learning_rate": 1.0896e-05, "loss": 0.0323, "step": 3635 }, { "epoch": 6.7091412742382275, "grad_norm": 0.639951765537262, "learning_rate": 1.0899e-05, "loss": 0.037, "step": 3636 }, { "epoch": 6.710987996306556, "grad_norm": 0.6436964273452759, "learning_rate": 1.0902e-05, "loss": 0.0292, "step": 3637 }, { "epoch": 6.712834718374885, "grad_norm": 0.8461018800735474, "learning_rate": 1.0905e-05, "loss": 0.0442, "step": 3638 }, { "epoch": 6.714681440443213, "grad_norm": 0.724323034286499, "learning_rate": 1.0908e-05, "loss": 0.0471, "step": 3639 }, { "epoch": 6.716528162511542, "grad_norm": 1.6644361019134521, "learning_rate": 1.0911000000000001e-05, "loss": 0.0465, "step": 3640 }, { "epoch": 6.7183748845798705, "grad_norm": 1.0163922309875488, "learning_rate": 1.0914000000000001e-05, "loss": 0.0478, "step": 3641 }, { "epoch": 6.7202216066482, "grad_norm": 1.1931281089782715, "learning_rate": 1.0917e-05, "loss": 0.0705, "step": 3642 }, { "epoch": 6.722068328716528, "grad_norm": 0.6734774112701416, "learning_rate": 1.092e-05, "loss": 0.0346, "step": 3643 }, { "epoch": 6.723915050784857, "grad_norm": 0.6857860684394836, "learning_rate": 1.0923e-05, "loss": 0.0406, "step": 3644 }, { "epoch": 6.725761772853185, "grad_norm": 0.9690484404563904, "learning_rate": 1.0926000000000002e-05, "loss": 0.0689, "step": 3645 }, { "epoch": 6.7276084949215145, "grad_norm": 1.1824640035629272, "learning_rate": 1.0929e-05, "loss": 0.0527, "step": 3646 }, { "epoch": 6.729455216989843, "grad_norm": 0.6219495534896851, "learning_rate": 1.0932e-05, "loss": 0.0349, "step": 3647 }, { "epoch": 6.731301939058172, "grad_norm": 1.6175211668014526, 
"learning_rate": 1.0935e-05, "loss": 0.0481, "step": 3648 }, { "epoch": 6.7331486611265, "grad_norm": 0.951744794845581, "learning_rate": 1.0938e-05, "loss": 0.0421, "step": 3649 }, { "epoch": 6.734995383194829, "grad_norm": 1.0563994646072388, "learning_rate": 1.0941e-05, "loss": 0.086, "step": 3650 }, { "epoch": 6.7368421052631575, "grad_norm": 1.0864402055740356, "learning_rate": 1.0944e-05, "loss": 0.0374, "step": 3651 }, { "epoch": 6.738688827331487, "grad_norm": 1.0111253261566162, "learning_rate": 1.0947e-05, "loss": 0.0508, "step": 3652 }, { "epoch": 6.740535549399816, "grad_norm": 2.369296073913574, "learning_rate": 1.095e-05, "loss": 0.4443, "step": 3653 }, { "epoch": 6.742382271468144, "grad_norm": 2.0507547855377197, "learning_rate": 1.0953e-05, "loss": 0.3894, "step": 3654 }, { "epoch": 6.744228993536472, "grad_norm": 1.1392920017242432, "learning_rate": 1.0956000000000001e-05, "loss": 0.3178, "step": 3655 }, { "epoch": 6.7460757156048015, "grad_norm": 0.9975927472114563, "learning_rate": 1.0959000000000001e-05, "loss": 0.2191, "step": 3656 }, { "epoch": 6.747922437673131, "grad_norm": 1.681461215019226, "learning_rate": 1.0962000000000001e-05, "loss": 0.2412, "step": 3657 }, { "epoch": 6.749769159741459, "grad_norm": 1.2123366594314575, "learning_rate": 1.0965e-05, "loss": 0.1996, "step": 3658 }, { "epoch": 6.751615881809788, "grad_norm": 0.9915981292724609, "learning_rate": 1.0967999999999999e-05, "loss": 0.2356, "step": 3659 }, { "epoch": 6.753462603878116, "grad_norm": 1.0750139951705933, "learning_rate": 1.0971e-05, "loss": 0.1982, "step": 3660 }, { "epoch": 6.755309325946445, "grad_norm": 1.2357497215270996, "learning_rate": 1.0974e-05, "loss": 0.2488, "step": 3661 }, { "epoch": 6.757156048014774, "grad_norm": 1.0140907764434814, "learning_rate": 1.0977e-05, "loss": 0.1695, "step": 3662 }, { "epoch": 6.759002770083103, "grad_norm": 0.7370830774307251, "learning_rate": 1.098e-05, "loss": 0.1406, "step": 3663 }, { "epoch": 6.760849492151431, 
"grad_norm": 1.0511841773986816, "learning_rate": 1.0983e-05, "loss": 0.1273, "step": 3664 }, { "epoch": 6.76269621421976, "grad_norm": 0.872833251953125, "learning_rate": 1.0986000000000001e-05, "loss": 0.1098, "step": 3665 }, { "epoch": 6.7645429362880884, "grad_norm": 0.6190897226333618, "learning_rate": 1.0989000000000001e-05, "loss": 0.0684, "step": 3666 }, { "epoch": 6.766389658356418, "grad_norm": 0.555406928062439, "learning_rate": 1.0992e-05, "loss": 0.0599, "step": 3667 }, { "epoch": 6.768236380424746, "grad_norm": 0.817185640335083, "learning_rate": 1.0995e-05, "loss": 0.1271, "step": 3668 }, { "epoch": 6.770083102493075, "grad_norm": 0.9192994832992554, "learning_rate": 1.0998e-05, "loss": 0.0455, "step": 3669 }, { "epoch": 6.771929824561403, "grad_norm": 0.5491228103637695, "learning_rate": 1.1001e-05, "loss": 0.0603, "step": 3670 }, { "epoch": 6.773776546629732, "grad_norm": 0.8587676882743835, "learning_rate": 1.1004e-05, "loss": 0.0706, "step": 3671 }, { "epoch": 6.775623268698061, "grad_norm": 0.6606636643409729, "learning_rate": 1.1007e-05, "loss": 0.0618, "step": 3672 }, { "epoch": 6.77746999076639, "grad_norm": 0.4917014539241791, "learning_rate": 1.101e-05, "loss": 0.0335, "step": 3673 }, { "epoch": 6.779316712834718, "grad_norm": 0.627388060092926, "learning_rate": 1.1013e-05, "loss": 0.0368, "step": 3674 }, { "epoch": 6.781163434903047, "grad_norm": 0.827018141746521, "learning_rate": 1.1016000000000001e-05, "loss": 0.0857, "step": 3675 }, { "epoch": 6.783010156971375, "grad_norm": 0.538213312625885, "learning_rate": 1.1019e-05, "loss": 0.0322, "step": 3676 }, { "epoch": 6.784856879039705, "grad_norm": 0.504508376121521, "learning_rate": 1.1022e-05, "loss": 0.042, "step": 3677 }, { "epoch": 6.786703601108034, "grad_norm": 0.72398841381073, "learning_rate": 1.1025e-05, "loss": 0.0388, "step": 3678 }, { "epoch": 6.788550323176362, "grad_norm": 0.9289087653160095, "learning_rate": 1.1028e-05, "loss": 0.0386, "step": 3679 }, { "epoch": 
6.79039704524469, "grad_norm": 0.6446580290794373, "learning_rate": 1.1031000000000002e-05, "loss": 0.0418, "step": 3680 }, { "epoch": 6.792243767313019, "grad_norm": 0.7011333107948303, "learning_rate": 1.1034000000000001e-05, "loss": 0.0596, "step": 3681 }, { "epoch": 6.7940904893813485, "grad_norm": 0.6451525092124939, "learning_rate": 1.1037000000000001e-05, "loss": 0.0462, "step": 3682 }, { "epoch": 6.795937211449677, "grad_norm": 0.9175089001655579, "learning_rate": 1.104e-05, "loss": 0.0433, "step": 3683 }, { "epoch": 6.797783933518006, "grad_norm": 0.9872082471847534, "learning_rate": 1.1042999999999999e-05, "loss": 0.048, "step": 3684 }, { "epoch": 6.799630655586334, "grad_norm": 0.7646401524543762, "learning_rate": 1.1046e-05, "loss": 0.0568, "step": 3685 }, { "epoch": 6.801477377654663, "grad_norm": 0.5979199409484863, "learning_rate": 1.1049e-05, "loss": 0.0415, "step": 3686 }, { "epoch": 6.803324099722992, "grad_norm": 0.816354513168335, "learning_rate": 1.1052e-05, "loss": 0.0618, "step": 3687 }, { "epoch": 6.805170821791321, "grad_norm": 1.424713373184204, "learning_rate": 1.1055e-05, "loss": 0.038, "step": 3688 }, { "epoch": 6.807017543859649, "grad_norm": 0.6271553635597229, "learning_rate": 1.1058e-05, "loss": 0.0324, "step": 3689 }, { "epoch": 6.808864265927978, "grad_norm": 0.9549874663352966, "learning_rate": 1.1061000000000001e-05, "loss": 0.1012, "step": 3690 }, { "epoch": 6.810710987996306, "grad_norm": 0.8129993677139282, "learning_rate": 1.1064000000000001e-05, "loss": 0.0662, "step": 3691 }, { "epoch": 6.8125577100646355, "grad_norm": 2.0147483348846436, "learning_rate": 1.1067000000000001e-05, "loss": 0.0565, "step": 3692 }, { "epoch": 6.814404432132964, "grad_norm": 1.7509359121322632, "learning_rate": 1.107e-05, "loss": 0.0402, "step": 3693 }, { "epoch": 6.816251154201293, "grad_norm": 0.9780064225196838, "learning_rate": 1.1073e-05, "loss": 0.0488, "step": 3694 }, { "epoch": 6.818097876269621, "grad_norm": 0.8157630562782288, 
"learning_rate": 1.1075999999999999e-05, "loss": 0.0635, "step": 3695 }, { "epoch": 6.81994459833795, "grad_norm": 0.9507924318313599, "learning_rate": 1.1079e-05, "loss": 0.0589, "step": 3696 }, { "epoch": 6.821791320406279, "grad_norm": 0.7969843149185181, "learning_rate": 1.1082e-05, "loss": 0.0434, "step": 3697 }, { "epoch": 6.823638042474608, "grad_norm": 10.65235424041748, "learning_rate": 1.1085e-05, "loss": 0.0229, "step": 3698 }, { "epoch": 6.825484764542936, "grad_norm": 0.8875689506530762, "learning_rate": 1.1088e-05, "loss": 0.0812, "step": 3699 }, { "epoch": 6.827331486611265, "grad_norm": 0.5887988805770874, "learning_rate": 1.1091e-05, "loss": 0.0502, "step": 3700 }, { "epoch": 6.829178208679593, "grad_norm": 1.0622601509094238, "learning_rate": 1.1094e-05, "loss": 0.0771, "step": 3701 }, { "epoch": 6.8310249307479225, "grad_norm": 1.051266074180603, "learning_rate": 1.1097e-05, "loss": 0.0844, "step": 3702 }, { "epoch": 6.832871652816252, "grad_norm": 2.6369080543518066, "learning_rate": 1.11e-05, "loss": 0.4615, "step": 3703 }, { "epoch": 6.83471837488458, "grad_norm": 1.35551118850708, "learning_rate": 1.1103e-05, "loss": 0.2638, "step": 3704 }, { "epoch": 6.836565096952908, "grad_norm": 1.556361198425293, "learning_rate": 1.1106e-05, "loss": 0.3182, "step": 3705 }, { "epoch": 6.838411819021237, "grad_norm": 1.2559797763824463, "learning_rate": 1.1109000000000002e-05, "loss": 0.3061, "step": 3706 }, { "epoch": 6.840258541089566, "grad_norm": 1.3399794101715088, "learning_rate": 1.1112e-05, "loss": 0.297, "step": 3707 }, { "epoch": 6.842105263157895, "grad_norm": 1.2586995363235474, "learning_rate": 1.1115e-05, "loss": 0.2254, "step": 3708 }, { "epoch": 6.843951985226224, "grad_norm": 1.0613036155700684, "learning_rate": 1.1118e-05, "loss": 0.2483, "step": 3709 }, { "epoch": 6.845798707294552, "grad_norm": 1.3740078210830688, "learning_rate": 1.1120999999999999e-05, "loss": 0.2313, "step": 3710 }, { "epoch": 6.847645429362881, "grad_norm": 
0.8254731297492981, "learning_rate": 1.1124e-05, "loss": 0.1593, "step": 3711 }, { "epoch": 6.8494921514312095, "grad_norm": 0.7780969142913818, "learning_rate": 1.1127e-05, "loss": 0.1293, "step": 3712 }, { "epoch": 6.851338873499539, "grad_norm": 0.97072833776474, "learning_rate": 1.113e-05, "loss": 0.1721, "step": 3713 }, { "epoch": 6.853185595567867, "grad_norm": 0.9659312963485718, "learning_rate": 1.1133e-05, "loss": 0.1715, "step": 3714 }, { "epoch": 6.855032317636196, "grad_norm": 1.041581630706787, "learning_rate": 1.1136e-05, "loss": 0.2042, "step": 3715 }, { "epoch": 6.856879039704524, "grad_norm": 0.5041184425354004, "learning_rate": 1.1139000000000001e-05, "loss": 0.0725, "step": 3716 }, { "epoch": 6.858725761772853, "grad_norm": 0.4641791880130768, "learning_rate": 1.1142000000000001e-05, "loss": 0.0548, "step": 3717 }, { "epoch": 6.860572483841182, "grad_norm": 0.6150814294815063, "learning_rate": 1.1145000000000001e-05, "loss": 0.0657, "step": 3718 }, { "epoch": 6.862419205909511, "grad_norm": 0.9226855039596558, "learning_rate": 1.1148e-05, "loss": 0.0709, "step": 3719 }, { "epoch": 6.864265927977839, "grad_norm": 0.5464257001876831, "learning_rate": 1.1150999999999999e-05, "loss": 0.0448, "step": 3720 }, { "epoch": 6.866112650046168, "grad_norm": 0.9268885850906372, "learning_rate": 1.1154e-05, "loss": 0.0544, "step": 3721 }, { "epoch": 6.8679593721144965, "grad_norm": 0.6879826784133911, "learning_rate": 1.1157e-05, "loss": 0.0437, "step": 3722 }, { "epoch": 6.869806094182826, "grad_norm": 0.6709389686584473, "learning_rate": 1.116e-05, "loss": 0.0527, "step": 3723 }, { "epoch": 6.871652816251154, "grad_norm": 0.3872710168361664, "learning_rate": 1.1163e-05, "loss": 0.0256, "step": 3724 }, { "epoch": 6.873499538319483, "grad_norm": 0.6115924715995789, "learning_rate": 1.1166e-05, "loss": 0.0489, "step": 3725 }, { "epoch": 6.875346260387811, "grad_norm": 0.6553891897201538, "learning_rate": 1.1169000000000001e-05, "loss": 0.0577, "step": 3726 }, { 
"epoch": 6.87719298245614, "grad_norm": 1.1549395322799683, "learning_rate": 1.1172e-05, "loss": 0.0787, "step": 3727 }, { "epoch": 6.87903970452447, "grad_norm": 0.7184242606163025, "learning_rate": 1.1175e-05, "loss": 0.0511, "step": 3728 }, { "epoch": 6.880886426592798, "grad_norm": 0.7228502035140991, "learning_rate": 1.1178e-05, "loss": 0.0506, "step": 3729 }, { "epoch": 6.882733148661126, "grad_norm": 1.02662193775177, "learning_rate": 1.1181e-05, "loss": 0.0562, "step": 3730 }, { "epoch": 6.884579870729455, "grad_norm": 0.9584940075874329, "learning_rate": 1.1184000000000002e-05, "loss": 0.0522, "step": 3731 }, { "epoch": 6.886426592797784, "grad_norm": 0.7661015391349792, "learning_rate": 1.1187e-05, "loss": 0.075, "step": 3732 }, { "epoch": 6.888273314866113, "grad_norm": 0.6086714267730713, "learning_rate": 1.119e-05, "loss": 0.0482, "step": 3733 }, { "epoch": 6.890120036934442, "grad_norm": 0.8402490019798279, "learning_rate": 1.1193e-05, "loss": 0.0461, "step": 3734 }, { "epoch": 6.89196675900277, "grad_norm": 0.9646185040473938, "learning_rate": 1.1196e-05, "loss": 0.0348, "step": 3735 }, { "epoch": 6.893813481071099, "grad_norm": 0.8491271734237671, "learning_rate": 1.1199e-05, "loss": 0.0701, "step": 3736 }, { "epoch": 6.895660203139427, "grad_norm": 1.3548365831375122, "learning_rate": 1.1202e-05, "loss": 0.0643, "step": 3737 }, { "epoch": 6.8975069252077565, "grad_norm": 1.1035486459732056, "learning_rate": 1.1205e-05, "loss": 0.0643, "step": 3738 }, { "epoch": 6.899353647276085, "grad_norm": 0.5701906085014343, "learning_rate": 1.1208e-05, "loss": 0.0401, "step": 3739 }, { "epoch": 6.901200369344414, "grad_norm": 0.7111980319023132, "learning_rate": 1.1211e-05, "loss": 0.0554, "step": 3740 }, { "epoch": 6.903047091412742, "grad_norm": 0.6047128438949585, "learning_rate": 1.1214000000000001e-05, "loss": 0.0409, "step": 3741 }, { "epoch": 6.904893813481071, "grad_norm": 1.317233681678772, "learning_rate": 1.1217000000000001e-05, "loss": 0.0716, 
"step": 3742 }, { "epoch": 6.9067405355494, "grad_norm": 0.8000737428665161, "learning_rate": 1.1220000000000001e-05, "loss": 0.0484, "step": 3743 }, { "epoch": 6.908587257617729, "grad_norm": 0.9985203146934509, "learning_rate": 1.1222999999999999e-05, "loss": 0.0612, "step": 3744 }, { "epoch": 6.910433979686057, "grad_norm": 0.8063696026802063, "learning_rate": 1.1225999999999999e-05, "loss": 0.0494, "step": 3745 }, { "epoch": 6.912280701754386, "grad_norm": 0.9671680331230164, "learning_rate": 1.1229e-05, "loss": 0.0465, "step": 3746 }, { "epoch": 6.914127423822714, "grad_norm": 0.8081934452056885, "learning_rate": 1.1232e-05, "loss": 0.0603, "step": 3747 }, { "epoch": 6.9159741458910435, "grad_norm": 0.6315751671791077, "learning_rate": 1.1235e-05, "loss": 0.0494, "step": 3748 }, { "epoch": 6.917820867959372, "grad_norm": 1.0424327850341797, "learning_rate": 1.1238e-05, "loss": 0.0743, "step": 3749 }, { "epoch": 6.919667590027701, "grad_norm": 0.9841300249099731, "learning_rate": 1.1241e-05, "loss": 0.086, "step": 3750 }, { "epoch": 6.921514312096029, "grad_norm": 0.7991226315498352, "learning_rate": 1.1244000000000001e-05, "loss": 0.045, "step": 3751 }, { "epoch": 6.923361034164358, "grad_norm": 1.1342779397964478, "learning_rate": 1.1247000000000001e-05, "loss": 0.0686, "step": 3752 }, { "epoch": 6.9252077562326875, "grad_norm": 1.4662410020828247, "learning_rate": 1.125e-05, "loss": 0.2982, "step": 3753 }, { "epoch": 6.927054478301016, "grad_norm": 1.1601957082748413, "learning_rate": 1.1253e-05, "loss": 0.3563, "step": 3754 }, { "epoch": 6.928901200369344, "grad_norm": 0.7693567872047424, "learning_rate": 1.1256e-05, "loss": 0.2017, "step": 3755 }, { "epoch": 6.930747922437673, "grad_norm": 1.183059573173523, "learning_rate": 1.1259e-05, "loss": 0.2333, "step": 3756 }, { "epoch": 6.932594644506002, "grad_norm": 0.8615180850028992, "learning_rate": 1.1262e-05, "loss": 0.2091, "step": 3757 }, { "epoch": 6.9344413665743305, "grad_norm": 0.9820761680603027, 
"learning_rate": 1.1265e-05, "loss": 0.177, "step": 3758 }, { "epoch": 6.93628808864266, "grad_norm": 1.0718605518341064, "learning_rate": 1.1268e-05, "loss": 0.2127, "step": 3759 }, { "epoch": 6.938134810710988, "grad_norm": 0.8176330327987671, "learning_rate": 1.1271e-05, "loss": 0.1607, "step": 3760 }, { "epoch": 6.939981532779317, "grad_norm": 0.8427945375442505, "learning_rate": 1.1274e-05, "loss": 0.1712, "step": 3761 }, { "epoch": 6.941828254847645, "grad_norm": 0.6818265318870544, "learning_rate": 1.1277e-05, "loss": 0.1341, "step": 3762 }, { "epoch": 6.9436749769159745, "grad_norm": 0.7700383067131042, "learning_rate": 1.128e-05, "loss": 0.1158, "step": 3763 }, { "epoch": 6.945521698984303, "grad_norm": 0.5608851313591003, "learning_rate": 1.1283e-05, "loss": 0.0953, "step": 3764 }, { "epoch": 6.947368421052632, "grad_norm": 0.6973187923431396, "learning_rate": 1.1286e-05, "loss": 0.0873, "step": 3765 }, { "epoch": 6.94921514312096, "grad_norm": 0.7664932608604431, "learning_rate": 1.1289000000000002e-05, "loss": 0.1105, "step": 3766 }, { "epoch": 6.951061865189289, "grad_norm": 0.8267348408699036, "learning_rate": 1.1292000000000001e-05, "loss": 0.1101, "step": 3767 }, { "epoch": 6.9529085872576175, "grad_norm": 0.5690252184867859, "learning_rate": 1.1295000000000001e-05, "loss": 0.0683, "step": 3768 }, { "epoch": 6.954755309325947, "grad_norm": 0.5724848508834839, "learning_rate": 1.1298e-05, "loss": 0.0459, "step": 3769 }, { "epoch": 6.956602031394275, "grad_norm": 0.646037220954895, "learning_rate": 1.1300999999999999e-05, "loss": 0.0703, "step": 3770 }, { "epoch": 6.958448753462604, "grad_norm": 0.6106911301612854, "learning_rate": 1.1304e-05, "loss": 0.0535, "step": 3771 }, { "epoch": 6.960295475530932, "grad_norm": 0.5185670852661133, "learning_rate": 1.1307e-05, "loss": 0.0474, "step": 3772 }, { "epoch": 6.9621421975992615, "grad_norm": 0.9679078459739685, "learning_rate": 1.131e-05, "loss": 0.0386, "step": 3773 }, { "epoch": 6.96398891966759, 
"grad_norm": 0.6940216422080994, "learning_rate": 1.1313e-05, "loss": 0.0564, "step": 3774 }, { "epoch": 6.965835641735919, "grad_norm": 0.6831811666488647, "learning_rate": 1.1316e-05, "loss": 0.0392, "step": 3775 }, { "epoch": 6.967682363804247, "grad_norm": 0.7709175944328308, "learning_rate": 1.1319000000000001e-05, "loss": 0.0409, "step": 3776 }, { "epoch": 6.969529085872576, "grad_norm": 0.7472397685050964, "learning_rate": 1.1322000000000001e-05, "loss": 0.0447, "step": 3777 }, { "epoch": 6.971375807940905, "grad_norm": 0.7726459503173828, "learning_rate": 1.1325e-05, "loss": 0.0454, "step": 3778 }, { "epoch": 6.973222530009234, "grad_norm": 0.6965910792350769, "learning_rate": 1.1328e-05, "loss": 0.0392, "step": 3779 }, { "epoch": 6.975069252077562, "grad_norm": 0.45253920555114746, "learning_rate": 1.1331e-05, "loss": 0.0251, "step": 3780 }, { "epoch": 6.976915974145891, "grad_norm": 0.9065711498260498, "learning_rate": 1.1334e-05, "loss": 0.0474, "step": 3781 }, { "epoch": 6.97876269621422, "grad_norm": 0.5685831308364868, "learning_rate": 1.1337e-05, "loss": 0.0252, "step": 3782 }, { "epoch": 6.980609418282548, "grad_norm": 0.6188499927520752, "learning_rate": 1.134e-05, "loss": 0.0251, "step": 3783 }, { "epoch": 6.982456140350877, "grad_norm": 0.655066967010498, "learning_rate": 1.1343e-05, "loss": 0.0445, "step": 3784 }, { "epoch": 6.984302862419206, "grad_norm": 0.9167073369026184, "learning_rate": 1.1346e-05, "loss": 0.0377, "step": 3785 }, { "epoch": 6.986149584487535, "grad_norm": 0.6690863966941833, "learning_rate": 1.1349000000000001e-05, "loss": 0.0368, "step": 3786 }, { "epoch": 6.987996306555863, "grad_norm": 0.4566117525100708, "learning_rate": 1.1352e-05, "loss": 0.0262, "step": 3787 }, { "epoch": 6.989843028624192, "grad_norm": 0.8612862825393677, "learning_rate": 1.1355e-05, "loss": 0.0529, "step": 3788 }, { "epoch": 6.991689750692521, "grad_norm": 0.8186120986938477, "learning_rate": 1.1358e-05, "loss": 0.055, "step": 3789 }, { "epoch": 
6.99353647276085, "grad_norm": 1.036474347114563, "learning_rate": 1.1361e-05, "loss": 0.059, "step": 3790 }, { "epoch": 6.995383194829178, "grad_norm": 0.9849673509597778, "learning_rate": 1.1364000000000002e-05, "loss": 0.0572, "step": 3791 }, { "epoch": 6.997229916897507, "grad_norm": 0.7555782198905945, "learning_rate": 1.1367000000000001e-05, "loss": 0.0437, "step": 3792 }, { "epoch": 6.999076638965835, "grad_norm": 1.5732660293579102, "learning_rate": 1.137e-05, "loss": 0.084, "step": 3793 }, { "epoch": 7.0, "grad_norm": 1.2220367193222046, "learning_rate": 1.1373e-05, "loss": 0.0706, "step": 3794 }, { "epoch": 7.001846722068329, "grad_norm": 1.5011942386627197, "learning_rate": 1.1376e-05, "loss": 0.3325, "step": 3795 }, { "epoch": 7.003693444136657, "grad_norm": 1.134968876838684, "learning_rate": 1.1379e-05, "loss": 0.3102, "step": 3796 }, { "epoch": 7.0055401662049865, "grad_norm": 1.1414200067520142, "learning_rate": 1.1382e-05, "loss": 0.289, "step": 3797 }, { "epoch": 7.007386888273315, "grad_norm": 1.4115161895751953, "learning_rate": 1.1385e-05, "loss": 0.311, "step": 3798 }, { "epoch": 7.009233610341644, "grad_norm": 1.1737440824508667, "learning_rate": 1.1388e-05, "loss": 0.2275, "step": 3799 }, { "epoch": 7.011080332409972, "grad_norm": 0.9778410196304321, "learning_rate": 1.1391e-05, "loss": 0.2252, "step": 3800 }, { "epoch": 7.012927054478301, "grad_norm": 1.2007774114608765, "learning_rate": 1.1394000000000001e-05, "loss": 0.2059, "step": 3801 }, { "epoch": 7.01477377654663, "grad_norm": 0.9967468976974487, "learning_rate": 1.1397000000000001e-05, "loss": 0.1763, "step": 3802 }, { "epoch": 7.016620498614959, "grad_norm": 0.8212352395057678, "learning_rate": 1.1400000000000001e-05, "loss": 0.1688, "step": 3803 }, { "epoch": 7.018467220683287, "grad_norm": 0.7178338170051575, "learning_rate": 1.1403e-05, "loss": 0.1367, "step": 3804 }, { "epoch": 7.020313942751616, "grad_norm": 0.8649212121963501, "learning_rate": 1.1406e-05, "loss": 0.1519, 
"step": 3805 }, { "epoch": 7.022160664819944, "grad_norm": 0.7651941776275635, "learning_rate": 1.1409e-05, "loss": 0.1076, "step": 3806 }, { "epoch": 7.0240073868882735, "grad_norm": 0.6185657978057861, "learning_rate": 1.1412e-05, "loss": 0.0653, "step": 3807 }, { "epoch": 7.025854108956602, "grad_norm": 0.6425574421882629, "learning_rate": 1.1415e-05, "loss": 0.0571, "step": 3808 }, { "epoch": 7.027700831024931, "grad_norm": 0.7995126247406006, "learning_rate": 1.1418e-05, "loss": 0.0715, "step": 3809 }, { "epoch": 7.029547553093259, "grad_norm": 0.525067925453186, "learning_rate": 1.1421e-05, "loss": 0.0417, "step": 3810 }, { "epoch": 7.031394275161588, "grad_norm": 0.8681696057319641, "learning_rate": 1.1424000000000001e-05, "loss": 0.054, "step": 3811 }, { "epoch": 7.033240997229917, "grad_norm": 0.5476704239845276, "learning_rate": 1.1427000000000001e-05, "loss": 0.0404, "step": 3812 }, { "epoch": 7.035087719298246, "grad_norm": 0.5106635689735413, "learning_rate": 1.143e-05, "loss": 0.0505, "step": 3813 }, { "epoch": 7.036934441366574, "grad_norm": 0.837536096572876, "learning_rate": 1.1433e-05, "loss": 0.0446, "step": 3814 }, { "epoch": 7.038781163434903, "grad_norm": 0.47881874442100525, "learning_rate": 1.1436e-05, "loss": 0.0343, "step": 3815 }, { "epoch": 7.040627885503231, "grad_norm": 1.3114343881607056, "learning_rate": 1.1439e-05, "loss": 0.0292, "step": 3816 }, { "epoch": 7.0424746075715605, "grad_norm": 0.8697632551193237, "learning_rate": 1.1442000000000002e-05, "loss": 0.051, "step": 3817 }, { "epoch": 7.044321329639889, "grad_norm": 0.7076679468154907, "learning_rate": 1.1445e-05, "loss": 0.0379, "step": 3818 }, { "epoch": 7.046168051708218, "grad_norm": 0.933702826499939, "learning_rate": 1.1448e-05, "loss": 0.049, "step": 3819 }, { "epoch": 7.048014773776547, "grad_norm": 0.5204328298568726, "learning_rate": 1.1451e-05, "loss": 0.0374, "step": 3820 }, { "epoch": 7.049861495844875, "grad_norm": 0.8326895236968994, "learning_rate": 
1.1453999999999999e-05, "loss": 0.0464, "step": 3821 }, { "epoch": 7.0517082179132045, "grad_norm": 1.0531866550445557, "learning_rate": 1.1457e-05, "loss": 0.0297, "step": 3822 }, { "epoch": 7.053554939981533, "grad_norm": 0.4825021028518677, "learning_rate": 1.146e-05, "loss": 0.0404, "step": 3823 }, { "epoch": 7.055401662049862, "grad_norm": 0.8723925352096558, "learning_rate": 1.1463e-05, "loss": 0.0398, "step": 3824 }, { "epoch": 7.05724838411819, "grad_norm": 0.787763237953186, "learning_rate": 1.1466e-05, "loss": 0.0409, "step": 3825 }, { "epoch": 7.059095106186519, "grad_norm": 1.250771164894104, "learning_rate": 1.1469e-05, "loss": 0.063, "step": 3826 }, { "epoch": 7.0609418282548475, "grad_norm": 1.2153242826461792, "learning_rate": 1.1472000000000001e-05, "loss": 0.0315, "step": 3827 }, { "epoch": 7.062788550323177, "grad_norm": 0.47222232818603516, "learning_rate": 1.1475000000000001e-05, "loss": 0.0348, "step": 3828 }, { "epoch": 7.064635272391505, "grad_norm": 1.2347041368484497, "learning_rate": 1.1478000000000001e-05, "loss": 0.0351, "step": 3829 }, { "epoch": 7.066481994459834, "grad_norm": 0.6874457597732544, "learning_rate": 1.1480999999999999e-05, "loss": 0.0392, "step": 3830 }, { "epoch": 7.068328716528162, "grad_norm": 0.6906430125236511, "learning_rate": 1.1483999999999999e-05, "loss": 0.048, "step": 3831 }, { "epoch": 7.0701754385964914, "grad_norm": 0.5939635038375854, "learning_rate": 1.1487e-05, "loss": 0.0296, "step": 3832 }, { "epoch": 7.07202216066482, "grad_norm": 0.6054182052612305, "learning_rate": 1.149e-05, "loss": 0.0344, "step": 3833 }, { "epoch": 7.073868882733149, "grad_norm": 1.7528551816940308, "learning_rate": 1.1493e-05, "loss": 0.0575, "step": 3834 }, { "epoch": 7.075715604801477, "grad_norm": 1.990154504776001, "learning_rate": 1.1496e-05, "loss": 0.0416, "step": 3835 }, { "epoch": 7.077562326869806, "grad_norm": 0.8029806613922119, "learning_rate": 1.1499e-05, "loss": 0.0413, "step": 3836 }, { "epoch": 
7.0794090489381345, "grad_norm": 0.7306238412857056, "learning_rate": 1.1502000000000001e-05, "loss": 0.0336, "step": 3837 }, { "epoch": 7.081255771006464, "grad_norm": 0.5533416867256165, "learning_rate": 1.1505e-05, "loss": 0.0253, "step": 3838 }, { "epoch": 7.083102493074792, "grad_norm": 1.076889991760254, "learning_rate": 1.1508e-05, "loss": 0.0271, "step": 3839 }, { "epoch": 7.084949215143121, "grad_norm": 0.8983293175697327, "learning_rate": 1.1511e-05, "loss": 0.0578, "step": 3840 }, { "epoch": 7.086795937211449, "grad_norm": 0.756494402885437, "learning_rate": 1.1514e-05, "loss": 0.0386, "step": 3841 }, { "epoch": 7.088642659279778, "grad_norm": 0.9268624186515808, "learning_rate": 1.1517e-05, "loss": 0.0378, "step": 3842 }, { "epoch": 7.090489381348107, "grad_norm": 0.760355532169342, "learning_rate": 1.152e-05, "loss": 0.0555, "step": 3843 }, { "epoch": 7.092336103416436, "grad_norm": 0.9901613593101501, "learning_rate": 1.1523e-05, "loss": 0.0419, "step": 3844 }, { "epoch": 7.094182825484765, "grad_norm": 0.8592517971992493, "learning_rate": 1.1526e-05, "loss": 0.2719, "step": 3845 }, { "epoch": 7.096029547553093, "grad_norm": 1.2683329582214355, "learning_rate": 1.1529e-05, "loss": 0.3161, "step": 3846 }, { "epoch": 7.097876269621422, "grad_norm": 1.0169157981872559, "learning_rate": 1.1532e-05, "loss": 0.2672, "step": 3847 }, { "epoch": 7.099722991689751, "grad_norm": 0.9672853350639343, "learning_rate": 1.1535e-05, "loss": 0.2085, "step": 3848 }, { "epoch": 7.10156971375808, "grad_norm": 1.0237081050872803, "learning_rate": 1.1538e-05, "loss": 0.221, "step": 3849 }, { "epoch": 7.103416435826408, "grad_norm": 0.6840682625770569, "learning_rate": 1.1541e-05, "loss": 0.1727, "step": 3850 }, { "epoch": 7.105263157894737, "grad_norm": 0.8392327427864075, "learning_rate": 1.1544e-05, "loss": 0.1953, "step": 3851 }, { "epoch": 7.107109879963065, "grad_norm": 1.0461645126342773, "learning_rate": 1.1547000000000001e-05, "loss": 0.1848, "step": 3852 }, { 
"epoch": 7.108956602031395, "grad_norm": 0.9008057117462158, "learning_rate": 1.1550000000000001e-05, "loss": 0.1761, "step": 3853 }, { "epoch": 7.110803324099723, "grad_norm": 0.653768002986908, "learning_rate": 1.1553000000000001e-05, "loss": 0.1228, "step": 3854 }, { "epoch": 7.112650046168052, "grad_norm": 0.8528527021408081, "learning_rate": 1.1555999999999999e-05, "loss": 0.119, "step": 3855 }, { "epoch": 7.11449676823638, "grad_norm": 0.7448746562004089, "learning_rate": 1.1558999999999999e-05, "loss": 0.1272, "step": 3856 }, { "epoch": 7.116343490304709, "grad_norm": 0.7983085513114929, "learning_rate": 1.1562e-05, "loss": 0.0873, "step": 3857 }, { "epoch": 7.118190212373038, "grad_norm": 0.6713128089904785, "learning_rate": 1.1565e-05, "loss": 0.0652, "step": 3858 }, { "epoch": 7.120036934441367, "grad_norm": 1.0396053791046143, "learning_rate": 1.1568e-05, "loss": 0.1061, "step": 3859 }, { "epoch": 7.121883656509695, "grad_norm": 1.0687631368637085, "learning_rate": 1.1571e-05, "loss": 0.0647, "step": 3860 }, { "epoch": 7.123730378578024, "grad_norm": 0.4817259907722473, "learning_rate": 1.1574e-05, "loss": 0.0363, "step": 3861 }, { "epoch": 7.125577100646352, "grad_norm": 0.6856535077095032, "learning_rate": 1.1577000000000001e-05, "loss": 0.0456, "step": 3862 }, { "epoch": 7.127423822714682, "grad_norm": 0.5848652720451355, "learning_rate": 1.1580000000000001e-05, "loss": 0.0571, "step": 3863 }, { "epoch": 7.12927054478301, "grad_norm": 0.8321937322616577, "learning_rate": 1.1583e-05, "loss": 0.095, "step": 3864 }, { "epoch": 7.131117266851339, "grad_norm": 0.6058899164199829, "learning_rate": 1.1586e-05, "loss": 0.051, "step": 3865 }, { "epoch": 7.132963988919667, "grad_norm": 0.4666897654533386, "learning_rate": 1.1589e-05, "loss": 0.0349, "step": 3866 }, { "epoch": 7.134810710987996, "grad_norm": 1.0036622285842896, "learning_rate": 1.1592e-05, "loss": 0.045, "step": 3867 }, { "epoch": 7.136657433056325, "grad_norm": 0.48680198192596436, 
"learning_rate": 1.1595e-05, "loss": 0.033, "step": 3868 }, { "epoch": 7.138504155124654, "grad_norm": 1.0338425636291504, "learning_rate": 1.1598e-05, "loss": 0.0455, "step": 3869 }, { "epoch": 7.140350877192983, "grad_norm": 0.6906830668449402, "learning_rate": 1.1601e-05, "loss": 0.066, "step": 3870 }, { "epoch": 7.142197599261311, "grad_norm": 0.5115965008735657, "learning_rate": 1.1604e-05, "loss": 0.0264, "step": 3871 }, { "epoch": 7.14404432132964, "grad_norm": 0.62877357006073, "learning_rate": 1.1607000000000001e-05, "loss": 0.0313, "step": 3872 }, { "epoch": 7.1458910433979685, "grad_norm": 0.562495231628418, "learning_rate": 1.161e-05, "loss": 0.0365, "step": 3873 }, { "epoch": 7.147737765466298, "grad_norm": 1.0443249940872192, "learning_rate": 1.1613e-05, "loss": 0.044, "step": 3874 }, { "epoch": 7.149584487534626, "grad_norm": 0.8314567804336548, "learning_rate": 1.1616e-05, "loss": 0.0383, "step": 3875 }, { "epoch": 7.151431209602955, "grad_norm": 0.8624547719955444, "learning_rate": 1.1619e-05, "loss": 0.0465, "step": 3876 }, { "epoch": 7.153277931671283, "grad_norm": 0.43689149618148804, "learning_rate": 1.1622000000000002e-05, "loss": 0.0258, "step": 3877 }, { "epoch": 7.1551246537396125, "grad_norm": 0.5368509888648987, "learning_rate": 1.1625000000000001e-05, "loss": 0.0221, "step": 3878 }, { "epoch": 7.156971375807941, "grad_norm": 0.41164904832839966, "learning_rate": 1.1628e-05, "loss": 0.0264, "step": 3879 }, { "epoch": 7.15881809787627, "grad_norm": 0.7307270169258118, "learning_rate": 1.1631e-05, "loss": 0.0417, "step": 3880 }, { "epoch": 7.160664819944598, "grad_norm": 0.9499558806419373, "learning_rate": 1.1633999999999999e-05, "loss": 0.036, "step": 3881 }, { "epoch": 7.162511542012927, "grad_norm": 0.8061280250549316, "learning_rate": 1.1637e-05, "loss": 0.031, "step": 3882 }, { "epoch": 7.1643582640812555, "grad_norm": 0.5703011155128479, "learning_rate": 1.164e-05, "loss": 0.0456, "step": 3883 }, { "epoch": 7.166204986149585, 
"grad_norm": 0.7621678113937378, "learning_rate": 1.1643e-05, "loss": 0.0279, "step": 3884 }, { "epoch": 7.168051708217913, "grad_norm": 0.5694484710693359, "learning_rate": 1.1646e-05, "loss": 0.0405, "step": 3885 }, { "epoch": 7.169898430286242, "grad_norm": 0.5033777952194214, "learning_rate": 1.1649e-05, "loss": 0.0246, "step": 3886 }, { "epoch": 7.17174515235457, "grad_norm": 0.6190872192382812, "learning_rate": 1.1652000000000001e-05, "loss": 0.0375, "step": 3887 }, { "epoch": 7.1735918744228995, "grad_norm": 0.5975775718688965, "learning_rate": 1.1655000000000001e-05, "loss": 0.0288, "step": 3888 }, { "epoch": 7.175438596491228, "grad_norm": 0.7166990041732788, "learning_rate": 1.1658000000000001e-05, "loss": 0.0409, "step": 3889 }, { "epoch": 7.177285318559557, "grad_norm": 0.9195456504821777, "learning_rate": 1.1661e-05, "loss": 0.0482, "step": 3890 }, { "epoch": 7.179132040627885, "grad_norm": 1.0429009199142456, "learning_rate": 1.1664e-05, "loss": 0.0508, "step": 3891 }, { "epoch": 7.180978762696214, "grad_norm": 0.8649551868438721, "learning_rate": 1.1667e-05, "loss": 0.0636, "step": 3892 }, { "epoch": 7.1828254847645425, "grad_norm": 0.7114192247390747, "learning_rate": 1.167e-05, "loss": 0.0393, "step": 3893 }, { "epoch": 7.184672206832872, "grad_norm": 1.0726664066314697, "learning_rate": 1.1673e-05, "loss": 0.0507, "step": 3894 }, { "epoch": 7.186518928901201, "grad_norm": 2.35176944732666, "learning_rate": 1.1676e-05, "loss": 0.3612, "step": 3895 }, { "epoch": 7.188365650969529, "grad_norm": 1.3197213411331177, "learning_rate": 1.1679e-05, "loss": 0.3027, "step": 3896 }, { "epoch": 7.190212373037858, "grad_norm": 1.1717162132263184, "learning_rate": 1.1682000000000001e-05, "loss": 0.3242, "step": 3897 }, { "epoch": 7.1920590951061865, "grad_norm": 1.3300485610961914, "learning_rate": 1.1685e-05, "loss": 0.2406, "step": 3898 }, { "epoch": 7.193905817174516, "grad_norm": 0.8447170853614807, "learning_rate": 1.1688e-05, "loss": 0.18, "step": 3899 }, 
{ "epoch": 7.195752539242844, "grad_norm": 0.9538955092430115, "learning_rate": 1.1691e-05, "loss": 0.1968, "step": 3900 }, { "epoch": 7.197599261311173, "grad_norm": 0.7060114145278931, "learning_rate": 1.1694e-05, "loss": 0.1535, "step": 3901 }, { "epoch": 7.199445983379501, "grad_norm": 1.2481669187545776, "learning_rate": 1.1697000000000002e-05, "loss": 0.2951, "step": 3902 }, { "epoch": 7.20129270544783, "grad_norm": 0.7738790512084961, "learning_rate": 1.1700000000000001e-05, "loss": 0.1675, "step": 3903 }, { "epoch": 7.203139427516159, "grad_norm": 1.215665578842163, "learning_rate": 1.1703e-05, "loss": 0.1223, "step": 3904 }, { "epoch": 7.204986149584488, "grad_norm": 0.5640180706977844, "learning_rate": 1.1706e-05, "loss": 0.1018, "step": 3905 }, { "epoch": 7.206832871652816, "grad_norm": 0.6813676953315735, "learning_rate": 1.1709e-05, "loss": 0.0955, "step": 3906 }, { "epoch": 7.208679593721145, "grad_norm": 0.8873261213302612, "learning_rate": 1.1712e-05, "loss": 0.1367, "step": 3907 }, { "epoch": 7.2105263157894735, "grad_norm": 0.8507635593414307, "learning_rate": 1.1715e-05, "loss": 0.1125, "step": 3908 }, { "epoch": 7.212373037857803, "grad_norm": 0.5705675482749939, "learning_rate": 1.1718e-05, "loss": 0.0964, "step": 3909 }, { "epoch": 7.214219759926131, "grad_norm": 0.47501277923583984, "learning_rate": 1.1721e-05, "loss": 0.0511, "step": 3910 }, { "epoch": 7.21606648199446, "grad_norm": 0.48651376366615295, "learning_rate": 1.1724e-05, "loss": 0.0397, "step": 3911 }, { "epoch": 7.217913204062788, "grad_norm": 0.4294663667678833, "learning_rate": 1.1727000000000001e-05, "loss": 0.0319, "step": 3912 }, { "epoch": 7.219759926131117, "grad_norm": 0.500373125076294, "learning_rate": 1.1730000000000001e-05, "loss": 0.0353, "step": 3913 }, { "epoch": 7.221606648199446, "grad_norm": 0.4776148498058319, "learning_rate": 1.1733000000000001e-05, "loss": 0.0328, "step": 3914 }, { "epoch": 7.223453370267775, "grad_norm": 0.45514485239982605, "learning_rate": 
1.1736e-05, "loss": 0.0373, "step": 3915 }, { "epoch": 7.225300092336103, "grad_norm": 0.39044058322906494, "learning_rate": 1.1738999999999999e-05, "loss": 0.0241, "step": 3916 }, { "epoch": 7.227146814404432, "grad_norm": 1.1295156478881836, "learning_rate": 1.1742e-05, "loss": 0.042, "step": 3917 }, { "epoch": 7.22899353647276, "grad_norm": 0.6388261318206787, "learning_rate": 1.1745e-05, "loss": 0.0614, "step": 3918 }, { "epoch": 7.23084025854109, "grad_norm": 0.4165383279323578, "learning_rate": 1.1748e-05, "loss": 0.0182, "step": 3919 }, { "epoch": 7.232686980609419, "grad_norm": 0.6110149025917053, "learning_rate": 1.1751e-05, "loss": 0.0342, "step": 3920 }, { "epoch": 7.234533702677747, "grad_norm": 1.3807674646377563, "learning_rate": 1.1754e-05, "loss": 0.0954, "step": 3921 }, { "epoch": 7.236380424746076, "grad_norm": 1.6706429719924927, "learning_rate": 1.1757000000000001e-05, "loss": 0.0347, "step": 3922 }, { "epoch": 7.238227146814404, "grad_norm": 0.49312475323677063, "learning_rate": 1.1760000000000001e-05, "loss": 0.0276, "step": 3923 }, { "epoch": 7.2400738688827335, "grad_norm": 0.6351246237754822, "learning_rate": 1.1763e-05, "loss": 0.0343, "step": 3924 }, { "epoch": 7.241920590951062, "grad_norm": 0.7758086323738098, "learning_rate": 1.1766e-05, "loss": 0.0379, "step": 3925 }, { "epoch": 7.243767313019391, "grad_norm": 1.6319568157196045, "learning_rate": 1.1769e-05, "loss": 0.0458, "step": 3926 }, { "epoch": 7.245614035087719, "grad_norm": 0.5458925366401672, "learning_rate": 1.1772000000000002e-05, "loss": 0.0333, "step": 3927 }, { "epoch": 7.247460757156048, "grad_norm": 0.924403190612793, "learning_rate": 1.1775000000000002e-05, "loss": 0.0626, "step": 3928 }, { "epoch": 7.249307479224377, "grad_norm": 1.2806562185287476, "learning_rate": 1.1778e-05, "loss": 0.0589, "step": 3929 }, { "epoch": 7.251154201292706, "grad_norm": 0.6534441113471985, "learning_rate": 1.1781e-05, "loss": 0.0417, "step": 3930 }, { "epoch": 7.253000923361034, 
"grad_norm": 0.7234696745872498, "learning_rate": 1.1784e-05, "loss": 0.0352, "step": 3931 }, { "epoch": 7.254847645429363, "grad_norm": 0.5123321413993835, "learning_rate": 1.1787e-05, "loss": 0.0338, "step": 3932 }, { "epoch": 7.256694367497691, "grad_norm": 0.5848380327224731, "learning_rate": 1.179e-05, "loss": 0.0425, "step": 3933 }, { "epoch": 7.2585410895660205, "grad_norm": 0.6774309873580933, "learning_rate": 1.1793e-05, "loss": 0.0392, "step": 3934 }, { "epoch": 7.260387811634349, "grad_norm": 0.8812325596809387, "learning_rate": 1.1796e-05, "loss": 0.0424, "step": 3935 }, { "epoch": 7.262234533702678, "grad_norm": 0.8823750019073486, "learning_rate": 1.1799e-05, "loss": 0.046, "step": 3936 }, { "epoch": 7.264081255771006, "grad_norm": 0.8765324950218201, "learning_rate": 1.1802000000000002e-05, "loss": 0.0358, "step": 3937 }, { "epoch": 7.265927977839335, "grad_norm": 0.4929245114326477, "learning_rate": 1.1805000000000001e-05, "loss": 0.0271, "step": 3938 }, { "epoch": 7.267774699907664, "grad_norm": 0.555218517780304, "learning_rate": 1.1808000000000001e-05, "loss": 0.0404, "step": 3939 }, { "epoch": 7.269621421975993, "grad_norm": 0.9540738463401794, "learning_rate": 1.1811000000000001e-05, "loss": 0.0488, "step": 3940 }, { "epoch": 7.271468144044321, "grad_norm": 0.7682968378067017, "learning_rate": 1.1813999999999999e-05, "loss": 0.0509, "step": 3941 }, { "epoch": 7.27331486611265, "grad_norm": 1.5334378480911255, "learning_rate": 1.1816999999999999e-05, "loss": 0.0606, "step": 3942 }, { "epoch": 7.275161588180978, "grad_norm": 0.9216906428337097, "learning_rate": 1.182e-05, "loss": 0.0483, "step": 3943 }, { "epoch": 7.2770083102493075, "grad_norm": 0.915949821472168, "learning_rate": 1.1823e-05, "loss": 0.0442, "step": 3944 }, { "epoch": 7.278855032317637, "grad_norm": 1.0504190921783447, "learning_rate": 1.1826e-05, "loss": 0.2944, "step": 3945 }, { "epoch": 7.280701754385965, "grad_norm": 0.9758450388908386, "learning_rate": 1.1829e-05, "loss": 
0.3035, "step": 3946 }, { "epoch": 7.282548476454294, "grad_norm": 0.9999068379402161, "learning_rate": 1.1832e-05, "loss": 0.2148, "step": 3947 }, { "epoch": 7.284395198522622, "grad_norm": 2.185529947280884, "learning_rate": 1.1835000000000001e-05, "loss": 0.2184, "step": 3948 }, { "epoch": 7.286241920590951, "grad_norm": 0.9298887252807617, "learning_rate": 1.1838e-05, "loss": 0.1772, "step": 3949 }, { "epoch": 7.28808864265928, "grad_norm": 1.4344820976257324, "learning_rate": 1.1841e-05, "loss": 0.2368, "step": 3950 }, { "epoch": 7.289935364727609, "grad_norm": 1.246220350265503, "learning_rate": 1.1844e-05, "loss": 0.1624, "step": 3951 }, { "epoch": 7.291782086795937, "grad_norm": 0.7392306923866272, "learning_rate": 1.1847e-05, "loss": 0.1859, "step": 3952 }, { "epoch": 7.293628808864266, "grad_norm": 0.8688386678695679, "learning_rate": 1.185e-05, "loss": 0.1944, "step": 3953 }, { "epoch": 7.2954755309325945, "grad_norm": 0.7431932687759399, "learning_rate": 1.1853e-05, "loss": 0.1005, "step": 3954 }, { "epoch": 7.297322253000924, "grad_norm": 0.9904128909111023, "learning_rate": 1.1856e-05, "loss": 0.1232, "step": 3955 }, { "epoch": 7.299168975069252, "grad_norm": 0.7489523887634277, "learning_rate": 1.1859e-05, "loss": 0.079, "step": 3956 }, { "epoch": 7.301015697137581, "grad_norm": 2.4830214977264404, "learning_rate": 1.1862e-05, "loss": 0.0734, "step": 3957 }, { "epoch": 7.302862419205909, "grad_norm": 0.47750627994537354, "learning_rate": 1.1865e-05, "loss": 0.0462, "step": 3958 }, { "epoch": 7.304709141274238, "grad_norm": 0.5852447748184204, "learning_rate": 1.1868e-05, "loss": 0.0975, "step": 3959 }, { "epoch": 7.306555863342567, "grad_norm": 0.586001455783844, "learning_rate": 1.1871e-05, "loss": 0.0861, "step": 3960 }, { "epoch": 7.308402585410896, "grad_norm": 0.6134559512138367, "learning_rate": 1.1874e-05, "loss": 0.0558, "step": 3961 }, { "epoch": 7.310249307479224, "grad_norm": 1.9210140705108643, "learning_rate": 1.1877e-05, "loss": 0.0457, 
"step": 3962 }, { "epoch": 7.312096029547553, "grad_norm": 1.0939297676086426, "learning_rate": 1.1880000000000001e-05, "loss": 0.0987, "step": 3963 }, { "epoch": 7.3139427516158815, "grad_norm": 0.9990463256835938, "learning_rate": 1.1883000000000001e-05, "loss": 0.1067, "step": 3964 }, { "epoch": 7.315789473684211, "grad_norm": 0.7564393877983093, "learning_rate": 1.1886e-05, "loss": 0.0613, "step": 3965 }, { "epoch": 7.317636195752539, "grad_norm": 0.4570484459400177, "learning_rate": 1.1889e-05, "loss": 0.0392, "step": 3966 }, { "epoch": 7.319482917820868, "grad_norm": 0.593252420425415, "learning_rate": 1.1891999999999999e-05, "loss": 0.0469, "step": 3967 }, { "epoch": 7.321329639889196, "grad_norm": 0.7046050429344177, "learning_rate": 1.1895e-05, "loss": 0.0436, "step": 3968 }, { "epoch": 7.323176361957525, "grad_norm": 0.647758424282074, "learning_rate": 1.1898e-05, "loss": 0.041, "step": 3969 }, { "epoch": 7.325023084025855, "grad_norm": 0.7471902370452881, "learning_rate": 1.1901e-05, "loss": 0.0623, "step": 3970 }, { "epoch": 7.326869806094183, "grad_norm": 0.6938056945800781, "learning_rate": 1.1904e-05, "loss": 0.0369, "step": 3971 }, { "epoch": 7.328716528162512, "grad_norm": 0.6024402976036072, "learning_rate": 1.1907e-05, "loss": 0.0309, "step": 3972 }, { "epoch": 7.33056325023084, "grad_norm": 0.9108567833900452, "learning_rate": 1.1910000000000001e-05, "loss": 0.0313, "step": 3973 }, { "epoch": 7.332409972299169, "grad_norm": 0.8201056718826294, "learning_rate": 1.1913000000000001e-05, "loss": 0.05, "step": 3974 }, { "epoch": 7.334256694367498, "grad_norm": 0.6461535692214966, "learning_rate": 1.1916e-05, "loss": 0.0398, "step": 3975 }, { "epoch": 7.336103416435827, "grad_norm": 0.6558883786201477, "learning_rate": 1.1919e-05, "loss": 0.0392, "step": 3976 }, { "epoch": 7.337950138504155, "grad_norm": 0.6368681192398071, "learning_rate": 1.1922e-05, "loss": 0.0341, "step": 3977 }, { "epoch": 7.339796860572484, "grad_norm": 0.6736059784889221, 
"learning_rate": 1.1925e-05, "loss": 0.0378, "step": 3978 }, { "epoch": 7.341643582640812, "grad_norm": 0.7553744912147522, "learning_rate": 1.1928e-05, "loss": 0.0513, "step": 3979 }, { "epoch": 7.3434903047091415, "grad_norm": 0.7779793739318848, "learning_rate": 1.1931e-05, "loss": 0.0427, "step": 3980 }, { "epoch": 7.34533702677747, "grad_norm": 0.7781184315681458, "learning_rate": 1.1934e-05, "loss": 0.0525, "step": 3981 }, { "epoch": 7.347183748845799, "grad_norm": 0.5072903037071228, "learning_rate": 1.1937e-05, "loss": 0.0327, "step": 3982 }, { "epoch": 7.349030470914127, "grad_norm": 0.7815638780593872, "learning_rate": 1.1940000000000001e-05, "loss": 0.0421, "step": 3983 }, { "epoch": 7.350877192982456, "grad_norm": 0.5705381035804749, "learning_rate": 1.1943e-05, "loss": 0.0328, "step": 3984 }, { "epoch": 7.352723915050785, "grad_norm": 0.6040554642677307, "learning_rate": 1.1946e-05, "loss": 0.0374, "step": 3985 }, { "epoch": 7.354570637119114, "grad_norm": 0.7622888088226318, "learning_rate": 1.1949e-05, "loss": 0.0333, "step": 3986 }, { "epoch": 7.356417359187442, "grad_norm": 0.615376889705658, "learning_rate": 1.1952e-05, "loss": 0.0304, "step": 3987 }, { "epoch": 7.358264081255771, "grad_norm": 1.0284879207611084, "learning_rate": 1.1955000000000002e-05, "loss": 0.0381, "step": 3988 }, { "epoch": 7.360110803324099, "grad_norm": 0.5528692603111267, "learning_rate": 1.1958000000000001e-05, "loss": 0.0413, "step": 3989 }, { "epoch": 7.3619575253924285, "grad_norm": 0.6359366178512573, "learning_rate": 1.1961e-05, "loss": 0.0353, "step": 3990 }, { "epoch": 7.363804247460757, "grad_norm": 0.5964512825012207, "learning_rate": 1.1964e-05, "loss": 0.0295, "step": 3991 }, { "epoch": 7.365650969529086, "grad_norm": 0.7896857261657715, "learning_rate": 1.1966999999999999e-05, "loss": 0.0322, "step": 3992 }, { "epoch": 7.367497691597414, "grad_norm": 0.7088826298713684, "learning_rate": 1.197e-05, "loss": 0.0424, "step": 3993 }, { "epoch": 7.369344413665743, 
"grad_norm": 1.7857023477554321, "learning_rate": 1.1973e-05, "loss": 0.111, "step": 3994 }, { "epoch": 7.3711911357340725, "grad_norm": 1.8425383567810059, "learning_rate": 1.1976e-05, "loss": 0.3479, "step": 3995 }, { "epoch": 7.373037857802401, "grad_norm": 1.8757927417755127, "learning_rate": 1.1979e-05, "loss": 0.3627, "step": 3996 }, { "epoch": 7.374884579870729, "grad_norm": 1.019248366355896, "learning_rate": 1.1982e-05, "loss": 0.2404, "step": 3997 }, { "epoch": 7.376731301939058, "grad_norm": 0.9379916787147522, "learning_rate": 1.1985000000000001e-05, "loss": 0.2214, "step": 3998 }, { "epoch": 7.378578024007387, "grad_norm": 0.9834468364715576, "learning_rate": 1.1988000000000001e-05, "loss": 0.2044, "step": 3999 }, { "epoch": 7.3804247460757155, "grad_norm": 1.1186176538467407, "learning_rate": 1.1991000000000001e-05, "loss": 0.2837, "step": 4000 }, { "epoch": 7.3804247460757155, "eval_cer": 0.12484122663763382, "eval_loss": 0.3644801080226898, "eval_runtime": 15.1759, "eval_samples_per_second": 20.032, "eval_steps_per_second": 0.659, "eval_wer": 0.4376438986953185, "step": 4000 }, { "epoch": 7.382271468144045, "grad_norm": 1.1192575693130493, "learning_rate": 1.1994e-05, "loss": 0.1749, "step": 4001 }, { "epoch": 7.384118190212373, "grad_norm": 1.0478407144546509, "learning_rate": 1.1996999999999999e-05, "loss": 0.1652, "step": 4002 }, { "epoch": 7.385964912280702, "grad_norm": 0.9594413638114929, "learning_rate": 1.2e-05, "loss": 0.1445, "step": 4003 }, { "epoch": 7.38781163434903, "grad_norm": 0.8693169951438904, "learning_rate": 1.2003e-05, "loss": 0.1175, "step": 4004 }, { "epoch": 7.3896583564173595, "grad_norm": 0.9398826360702515, "learning_rate": 1.2006e-05, "loss": 0.1551, "step": 4005 }, { "epoch": 7.391505078485688, "grad_norm": 1.0473353862762451, "learning_rate": 1.2009e-05, "loss": 0.1708, "step": 4006 }, { "epoch": 7.393351800554017, "grad_norm": 0.5337854027748108, "learning_rate": 1.2012e-05, "loss": 0.0752, "step": 4007 }, { "epoch": 
7.395198522622345, "grad_norm": 1.338207483291626, "learning_rate": 1.2015000000000001e-05, "loss": 0.1319, "step": 4008 }, { "epoch": 7.397045244690674, "grad_norm": 1.462694525718689, "learning_rate": 1.2018e-05, "loss": 0.0987, "step": 4009 }, { "epoch": 7.3988919667590025, "grad_norm": 0.9892147779464722, "learning_rate": 1.2021e-05, "loss": 0.0762, "step": 4010 }, { "epoch": 7.400738688827332, "grad_norm": 0.6074382066726685, "learning_rate": 1.2024e-05, "loss": 0.0722, "step": 4011 }, { "epoch": 7.40258541089566, "grad_norm": 0.8629452586174011, "learning_rate": 1.2027e-05, "loss": 0.0638, "step": 4012 }, { "epoch": 7.404432132963989, "grad_norm": 0.5274789333343506, "learning_rate": 1.2030000000000002e-05, "loss": 0.0608, "step": 4013 }, { "epoch": 7.406278855032317, "grad_norm": 0.6876819729804993, "learning_rate": 1.2033000000000002e-05, "loss": 0.059, "step": 4014 }, { "epoch": 7.4081255771006465, "grad_norm": 0.5091372728347778, "learning_rate": 1.2036e-05, "loss": 0.0488, "step": 4015 }, { "epoch": 7.409972299168975, "grad_norm": 0.47496551275253296, "learning_rate": 1.2039e-05, "loss": 0.0441, "step": 4016 }, { "epoch": 7.411819021237304, "grad_norm": 0.5922196507453918, "learning_rate": 1.2042e-05, "loss": 0.036, "step": 4017 }, { "epoch": 7.413665743305632, "grad_norm": 0.7583091855049133, "learning_rate": 1.2045e-05, "loss": 0.0383, "step": 4018 }, { "epoch": 7.415512465373961, "grad_norm": 0.6326509118080139, "learning_rate": 1.2048e-05, "loss": 0.0419, "step": 4019 }, { "epoch": 7.41735918744229, "grad_norm": 0.5933094024658203, "learning_rate": 1.2051e-05, "loss": 0.0331, "step": 4020 }, { "epoch": 7.419205909510619, "grad_norm": 0.6546366810798645, "learning_rate": 1.2054e-05, "loss": 0.0421, "step": 4021 }, { "epoch": 7.421052631578947, "grad_norm": 0.5232709646224976, "learning_rate": 1.2057e-05, "loss": 0.0411, "step": 4022 }, { "epoch": 7.422899353647276, "grad_norm": 0.48919686675071716, "learning_rate": 1.2060000000000001e-05, "loss": 
0.0303, "step": 4023 }, { "epoch": 7.424746075715605, "grad_norm": 0.6700199842453003, "learning_rate": 1.2063000000000001e-05, "loss": 0.0301, "step": 4024 }, { "epoch": 7.426592797783933, "grad_norm": 0.749679446220398, "learning_rate": 1.2066000000000001e-05, "loss": 0.0335, "step": 4025 }, { "epoch": 7.428439519852263, "grad_norm": 0.957880437374115, "learning_rate": 1.2069e-05, "loss": 0.0577, "step": 4026 }, { "epoch": 7.430286241920591, "grad_norm": 0.9046592116355896, "learning_rate": 1.2071999999999999e-05, "loss": 0.0438, "step": 4027 }, { "epoch": 7.43213296398892, "grad_norm": 1.0317715406417847, "learning_rate": 1.2075e-05, "loss": 0.0437, "step": 4028 }, { "epoch": 7.433979686057248, "grad_norm": 0.8226898312568665, "learning_rate": 1.2078e-05, "loss": 0.036, "step": 4029 }, { "epoch": 7.435826408125577, "grad_norm": 0.6602367758750916, "learning_rate": 1.2081e-05, "loss": 0.0349, "step": 4030 }, { "epoch": 7.437673130193906, "grad_norm": 0.6261298656463623, "learning_rate": 1.2084e-05, "loss": 0.0381, "step": 4031 }, { "epoch": 7.439519852262235, "grad_norm": 2.0433335304260254, "learning_rate": 1.2087e-05, "loss": 0.0469, "step": 4032 }, { "epoch": 7.441366574330563, "grad_norm": 0.7569543123245239, "learning_rate": 1.2090000000000001e-05, "loss": 0.0352, "step": 4033 }, { "epoch": 7.443213296398892, "grad_norm": 0.6144489049911499, "learning_rate": 1.2093000000000001e-05, "loss": 0.0375, "step": 4034 }, { "epoch": 7.44506001846722, "grad_norm": 0.8899089694023132, "learning_rate": 1.2096e-05, "loss": 0.0439, "step": 4035 }, { "epoch": 7.44690674053555, "grad_norm": 0.8068799376487732, "learning_rate": 1.2099e-05, "loss": 0.0475, "step": 4036 }, { "epoch": 7.448753462603878, "grad_norm": 0.9589241743087769, "learning_rate": 1.2102e-05, "loss": 0.0353, "step": 4037 }, { "epoch": 7.450600184672207, "grad_norm": 1.7914063930511475, "learning_rate": 1.2105000000000002e-05, "loss": 0.0447, "step": 4038 }, { "epoch": 7.452446906740535, "grad_norm": 
0.6683489084243774, "learning_rate": 1.2108e-05, "loss": 0.0395, "step": 4039 }, { "epoch": 7.454293628808864, "grad_norm": 1.6444193124771118, "learning_rate": 1.2111e-05, "loss": 0.0356, "step": 4040 }, { "epoch": 7.456140350877193, "grad_norm": 0.9469858407974243, "learning_rate": 1.2114e-05, "loss": 0.0649, "step": 4041 }, { "epoch": 7.457987072945522, "grad_norm": 0.9198259115219116, "learning_rate": 1.2117e-05, "loss": 0.0565, "step": 4042 }, { "epoch": 7.45983379501385, "grad_norm": 1.2903748750686646, "learning_rate": 1.2120000000000001e-05, "loss": 0.0419, "step": 4043 }, { "epoch": 7.461680517082179, "grad_norm": 1.6258609294891357, "learning_rate": 1.2123e-05, "loss": 0.0701, "step": 4044 }, { "epoch": 7.463527239150508, "grad_norm": 1.1336464881896973, "learning_rate": 1.2126e-05, "loss": 0.3336, "step": 4045 }, { "epoch": 7.465373961218837, "grad_norm": 1.0165140628814697, "learning_rate": 1.2129e-05, "loss": 0.2929, "step": 4046 }, { "epoch": 7.467220683287165, "grad_norm": 0.9984672665596008, "learning_rate": 1.2132e-05, "loss": 0.27, "step": 4047 }, { "epoch": 7.469067405355494, "grad_norm": 0.8502119779586792, "learning_rate": 1.2135000000000002e-05, "loss": 0.2118, "step": 4048 }, { "epoch": 7.470914127423823, "grad_norm": 1.1467849016189575, "learning_rate": 1.2138000000000001e-05, "loss": 0.2784, "step": 4049 }, { "epoch": 7.472760849492151, "grad_norm": 1.2587987184524536, "learning_rate": 1.2141000000000001e-05, "loss": 0.172, "step": 4050 }, { "epoch": 7.4746075715604805, "grad_norm": 1.3149694204330444, "learning_rate": 1.2144e-05, "loss": 0.2085, "step": 4051 }, { "epoch": 7.476454293628809, "grad_norm": 0.7081968784332275, "learning_rate": 1.2146999999999999e-05, "loss": 0.1349, "step": 4052 }, { "epoch": 7.478301015697138, "grad_norm": 1.1603951454162598, "learning_rate": 1.215e-05, "loss": 0.1923, "step": 4053 }, { "epoch": 7.480147737765466, "grad_norm": 1.0851069688796997, "learning_rate": 1.2153e-05, "loss": 0.2272, "step": 4054 }, { 
"epoch": 7.481994459833795, "grad_norm": 1.9162254333496094, "learning_rate": 1.2156e-05, "loss": 0.1324, "step": 4055 }, { "epoch": 7.4838411819021236, "grad_norm": 1.0658905506134033, "learning_rate": 1.2159e-05, "loss": 0.2051, "step": 4056 }, { "epoch": 7.485687903970453, "grad_norm": 0.8997984528541565, "learning_rate": 1.2162e-05, "loss": 0.1018, "step": 4057 }, { "epoch": 7.487534626038781, "grad_norm": 0.6505519151687622, "learning_rate": 1.2165000000000001e-05, "loss": 0.0812, "step": 4058 }, { "epoch": 7.48938134810711, "grad_norm": 0.5897863507270813, "learning_rate": 1.2168000000000001e-05, "loss": 0.0747, "step": 4059 }, { "epoch": 7.491228070175438, "grad_norm": 0.712464451789856, "learning_rate": 1.2171000000000001e-05, "loss": 0.0568, "step": 4060 }, { "epoch": 7.4930747922437675, "grad_norm": 0.5935330986976624, "learning_rate": 1.2174e-05, "loss": 0.0619, "step": 4061 }, { "epoch": 7.494921514312096, "grad_norm": 0.8959807753562927, "learning_rate": 1.2177e-05, "loss": 0.0852, "step": 4062 }, { "epoch": 7.496768236380425, "grad_norm": 0.7074034214019775, "learning_rate": 1.2180000000000002e-05, "loss": 0.0434, "step": 4063 }, { "epoch": 7.498614958448753, "grad_norm": 0.6812528371810913, "learning_rate": 1.2183e-05, "loss": 0.0431, "step": 4064 }, { "epoch": 7.500461680517082, "grad_norm": 0.4548470675945282, "learning_rate": 1.2186e-05, "loss": 0.0269, "step": 4065 }, { "epoch": 7.5023084025854105, "grad_norm": 0.8323044776916504, "learning_rate": 1.2189e-05, "loss": 0.0711, "step": 4066 }, { "epoch": 7.50415512465374, "grad_norm": 0.8252567052841187, "learning_rate": 1.2192e-05, "loss": 0.0321, "step": 4067 }, { "epoch": 7.506001846722068, "grad_norm": 0.6569534540176392, "learning_rate": 1.2195e-05, "loss": 0.0261, "step": 4068 }, { "epoch": 7.507848568790397, "grad_norm": 0.9114178419113159, "learning_rate": 1.2198e-05, "loss": 0.0442, "step": 4069 }, { "epoch": 7.509695290858726, "grad_norm": 0.7195739150047302, "learning_rate": 1.2201e-05, 
"loss": 0.0278, "step": 4070 }, { "epoch": 7.5115420129270545, "grad_norm": 1.6552071571350098, "learning_rate": 1.2204e-05, "loss": 0.0553, "step": 4071 }, { "epoch": 7.513388734995383, "grad_norm": 0.5468800067901611, "learning_rate": 1.2207e-05, "loss": 0.0267, "step": 4072 }, { "epoch": 7.515235457063712, "grad_norm": 0.7049787044525146, "learning_rate": 1.221e-05, "loss": 0.044, "step": 4073 }, { "epoch": 7.517082179132041, "grad_norm": 0.3411400020122528, "learning_rate": 1.2213000000000001e-05, "loss": 0.0176, "step": 4074 }, { "epoch": 7.518928901200369, "grad_norm": 1.0625396966934204, "learning_rate": 1.2216000000000001e-05, "loss": 0.0541, "step": 4075 }, { "epoch": 7.520775623268698, "grad_norm": 0.6516671776771545, "learning_rate": 1.2219e-05, "loss": 0.0417, "step": 4076 }, { "epoch": 7.522622345337027, "grad_norm": 0.6971566677093506, "learning_rate": 1.2222e-05, "loss": 0.0484, "step": 4077 }, { "epoch": 7.524469067405356, "grad_norm": 0.8688866496086121, "learning_rate": 1.2224999999999999e-05, "loss": 0.0487, "step": 4078 }, { "epoch": 7.526315789473684, "grad_norm": 0.49682992696762085, "learning_rate": 1.2228e-05, "loss": 0.0245, "step": 4079 }, { "epoch": 7.528162511542013, "grad_norm": 1.0755348205566406, "learning_rate": 1.2231e-05, "loss": 0.0438, "step": 4080 }, { "epoch": 7.5300092336103415, "grad_norm": 1.0635411739349365, "learning_rate": 1.2234e-05, "loss": 0.0455, "step": 4081 }, { "epoch": 7.531855955678671, "grad_norm": 0.7985247373580933, "learning_rate": 1.2237e-05, "loss": 0.0428, "step": 4082 }, { "epoch": 7.533702677746999, "grad_norm": 0.7823305726051331, "learning_rate": 1.224e-05, "loss": 0.0435, "step": 4083 }, { "epoch": 7.535549399815328, "grad_norm": 0.9674842953681946, "learning_rate": 1.2243000000000001e-05, "loss": 0.0624, "step": 4084 }, { "epoch": 7.537396121883656, "grad_norm": 0.6736524105072021, "learning_rate": 1.2246000000000001e-05, "loss": 0.0323, "step": 4085 }, { "epoch": 7.539242843951985, "grad_norm": 
1.1502015590667725, "learning_rate": 1.2249e-05, "loss": 0.0457, "step": 4086 }, { "epoch": 7.541089566020314, "grad_norm": 0.727032482624054, "learning_rate": 1.2252e-05, "loss": 0.0485, "step": 4087 }, { "epoch": 7.542936288088643, "grad_norm": 0.7703151702880859, "learning_rate": 1.2254999999999999e-05, "loss": 0.0351, "step": 4088 }, { "epoch": 7.544783010156971, "grad_norm": 1.0239579677581787, "learning_rate": 1.2258e-05, "loss": 0.0526, "step": 4089 }, { "epoch": 7.5466297322253, "grad_norm": 0.6008153557777405, "learning_rate": 1.2261e-05, "loss": 0.0354, "step": 4090 }, { "epoch": 7.5484764542936285, "grad_norm": 0.7614055275917053, "learning_rate": 1.2264e-05, "loss": 0.0396, "step": 4091 }, { "epoch": 7.550323176361958, "grad_norm": 0.5776079297065735, "learning_rate": 1.2267e-05, "loss": 0.0426, "step": 4092 }, { "epoch": 7.552169898430286, "grad_norm": 0.7658137083053589, "learning_rate": 1.227e-05, "loss": 0.0488, "step": 4093 }, { "epoch": 7.554016620498615, "grad_norm": 1.04031240940094, "learning_rate": 1.2273000000000001e-05, "loss": 0.075, "step": 4094 }, { "epoch": 7.555863342566944, "grad_norm": 1.6620241403579712, "learning_rate": 1.2276e-05, "loss": 0.371, "step": 4095 }, { "epoch": 7.557710064635272, "grad_norm": 0.8936166763305664, "learning_rate": 1.2279e-05, "loss": 0.2259, "step": 4096 }, { "epoch": 7.559556786703601, "grad_norm": 0.7464591264724731, "learning_rate": 1.2282e-05, "loss": 0.231, "step": 4097 }, { "epoch": 7.56140350877193, "grad_norm": 1.0560076236724854, "learning_rate": 1.2285e-05, "loss": 0.2953, "step": 4098 }, { "epoch": 7.563250230840259, "grad_norm": 0.6979200839996338, "learning_rate": 1.2288000000000002e-05, "loss": 0.1647, "step": 4099 }, { "epoch": 7.565096952908587, "grad_norm": 0.9179125428199768, "learning_rate": 1.2291000000000001e-05, "loss": 0.2385, "step": 4100 }, { "epoch": 7.566943674976916, "grad_norm": 0.7582529783248901, "learning_rate": 1.2294e-05, "loss": 0.14, "step": 4101 }, { "epoch": 
7.568790397045245, "grad_norm": 1.0879583358764648, "learning_rate": 1.2297e-05, "loss": 0.1912, "step": 4102 }, { "epoch": 7.570637119113574, "grad_norm": 0.9386312365531921, "learning_rate": 1.2299999999999999e-05, "loss": 0.1804, "step": 4103 }, { "epoch": 7.572483841181902, "grad_norm": 0.9516357779502869, "learning_rate": 1.2303e-05, "loss": 0.1823, "step": 4104 }, { "epoch": 7.574330563250231, "grad_norm": 0.9170127511024475, "learning_rate": 1.2306e-05, "loss": 0.1158, "step": 4105 }, { "epoch": 7.576177285318559, "grad_norm": 0.9759851098060608, "learning_rate": 1.2309e-05, "loss": 0.1446, "step": 4106 }, { "epoch": 7.5780240073868885, "grad_norm": 0.5087369084358215, "learning_rate": 1.2312e-05, "loss": 0.1097, "step": 4107 }, { "epoch": 7.579870729455217, "grad_norm": 0.6919082999229431, "learning_rate": 1.2315e-05, "loss": 0.1473, "step": 4108 }, { "epoch": 7.581717451523546, "grad_norm": 0.711525559425354, "learning_rate": 1.2318000000000001e-05, "loss": 0.0869, "step": 4109 }, { "epoch": 7.583564173591874, "grad_norm": 0.5739111304283142, "learning_rate": 1.2321000000000001e-05, "loss": 0.1035, "step": 4110 }, { "epoch": 7.585410895660203, "grad_norm": 0.7222297191619873, "learning_rate": 1.2324000000000001e-05, "loss": 0.0523, "step": 4111 }, { "epoch": 7.587257617728532, "grad_norm": 0.6934823989868164, "learning_rate": 1.2327e-05, "loss": 0.0441, "step": 4112 }, { "epoch": 7.589104339796861, "grad_norm": 0.6187019348144531, "learning_rate": 1.2329999999999999e-05, "loss": 0.0379, "step": 4113 }, { "epoch": 7.590951061865189, "grad_norm": 0.5205591320991516, "learning_rate": 1.2333e-05, "loss": 0.0589, "step": 4114 }, { "epoch": 7.592797783933518, "grad_norm": 0.7255731821060181, "learning_rate": 1.2336e-05, "loss": 0.0452, "step": 4115 }, { "epoch": 7.594644506001846, "grad_norm": 0.6000263690948486, "learning_rate": 1.2339e-05, "loss": 0.0625, "step": 4116 }, { "epoch": 7.5964912280701755, "grad_norm": 0.6422172784805298, "learning_rate": 
1.2342e-05, "loss": 0.0465, "step": 4117 }, { "epoch": 7.598337950138504, "grad_norm": 0.5606541633605957, "learning_rate": 1.2345e-05, "loss": 0.0379, "step": 4118 }, { "epoch": 7.600184672206833, "grad_norm": 0.3826885521411896, "learning_rate": 1.2348000000000001e-05, "loss": 0.021, "step": 4119 }, { "epoch": 7.602031394275162, "grad_norm": 0.743736982345581, "learning_rate": 1.2351e-05, "loss": 0.054, "step": 4120 }, { "epoch": 7.60387811634349, "grad_norm": 0.5651935935020447, "learning_rate": 1.2354e-05, "loss": 0.0403, "step": 4121 }, { "epoch": 7.605724838411819, "grad_norm": 0.5326204895973206, "learning_rate": 1.2357e-05, "loss": 0.0312, "step": 4122 }, { "epoch": 7.607571560480148, "grad_norm": 0.6177520751953125, "learning_rate": 1.236e-05, "loss": 0.0365, "step": 4123 }, { "epoch": 7.609418282548477, "grad_norm": 1.1195601224899292, "learning_rate": 1.2363000000000002e-05, "loss": 0.099, "step": 4124 }, { "epoch": 7.611265004616805, "grad_norm": 0.590433657169342, "learning_rate": 1.2366e-05, "loss": 0.0362, "step": 4125 }, { "epoch": 7.613111726685134, "grad_norm": 0.7750802040100098, "learning_rate": 1.2369e-05, "loss": 0.0409, "step": 4126 }, { "epoch": 7.6149584487534625, "grad_norm": 0.581501841545105, "learning_rate": 1.2372e-05, "loss": 0.0392, "step": 4127 }, { "epoch": 7.616805170821792, "grad_norm": 0.3566802740097046, "learning_rate": 1.2375e-05, "loss": 0.0193, "step": 4128 }, { "epoch": 7.61865189289012, "grad_norm": 0.6354184746742249, "learning_rate": 1.2378e-05, "loss": 0.0352, "step": 4129 }, { "epoch": 7.620498614958449, "grad_norm": 0.6342035531997681, "learning_rate": 1.2381e-05, "loss": 0.0585, "step": 4130 }, { "epoch": 7.622345337026777, "grad_norm": 0.473110556602478, "learning_rate": 1.2384e-05, "loss": 0.0255, "step": 4131 }, { "epoch": 7.624192059095106, "grad_norm": 0.5356754064559937, "learning_rate": 1.2387e-05, "loss": 0.0334, "step": 4132 }, { "epoch": 7.626038781163435, "grad_norm": 0.5536307096481323, "learning_rate": 
1.239e-05, "loss": 0.0506, "step": 4133 }, { "epoch": 7.627885503231764, "grad_norm": 0.7165328860282898, "learning_rate": 1.2393000000000001e-05, "loss": 0.0605, "step": 4134 }, { "epoch": 7.629732225300092, "grad_norm": 0.48536062240600586, "learning_rate": 1.2396000000000001e-05, "loss": 0.0277, "step": 4135 }, { "epoch": 7.631578947368421, "grad_norm": 0.6589899063110352, "learning_rate": 1.2399000000000001e-05, "loss": 0.0464, "step": 4136 }, { "epoch": 7.6334256694367495, "grad_norm": 0.8539356589317322, "learning_rate": 1.2402e-05, "loss": 0.0503, "step": 4137 }, { "epoch": 7.635272391505079, "grad_norm": 0.9269492626190186, "learning_rate": 1.2404999999999999e-05, "loss": 0.0487, "step": 4138 }, { "epoch": 7.637119113573407, "grad_norm": 0.7299609780311584, "learning_rate": 1.2408e-05, "loss": 0.0516, "step": 4139 }, { "epoch": 7.638965835641736, "grad_norm": 0.7203462719917297, "learning_rate": 1.2411e-05, "loss": 0.0255, "step": 4140 }, { "epoch": 7.640812557710064, "grad_norm": 0.5248554348945618, "learning_rate": 1.2414e-05, "loss": 0.0307, "step": 4141 }, { "epoch": 7.642659279778393, "grad_norm": 0.7557847499847412, "learning_rate": 1.2417e-05, "loss": 0.0451, "step": 4142 }, { "epoch": 7.644506001846722, "grad_norm": 0.8592972755432129, "learning_rate": 1.242e-05, "loss": 0.0482, "step": 4143 }, { "epoch": 7.646352723915051, "grad_norm": 0.8314827680587769, "learning_rate": 1.2423000000000001e-05, "loss": 0.0517, "step": 4144 }, { "epoch": 7.64819944598338, "grad_norm": 1.7957693338394165, "learning_rate": 1.2426000000000001e-05, "loss": 0.3635, "step": 4145 }, { "epoch": 7.650046168051708, "grad_norm": 1.1916546821594238, "learning_rate": 1.2429e-05, "loss": 0.297, "step": 4146 }, { "epoch": 7.6518928901200365, "grad_norm": 1.383188009262085, "learning_rate": 1.2432e-05, "loss": 0.2379, "step": 4147 }, { "epoch": 7.653739612188366, "grad_norm": 1.1948586702346802, "learning_rate": 1.2435e-05, "loss": 0.2576, "step": 4148 }, { "epoch": 
7.655586334256695, "grad_norm": 0.7643679976463318, "learning_rate": 1.2438000000000002e-05, "loss": 0.1959, "step": 4149 }, { "epoch": 7.657433056325023, "grad_norm": 0.8329753875732422, "learning_rate": 1.2441e-05, "loss": 0.2095, "step": 4150 }, { "epoch": 7.659279778393352, "grad_norm": 0.8791571855545044, "learning_rate": 1.2444e-05, "loss": 0.169, "step": 4151 }, { "epoch": 7.66112650046168, "grad_norm": 0.766928493976593, "learning_rate": 1.2447e-05, "loss": 0.1511, "step": 4152 }, { "epoch": 7.66297322253001, "grad_norm": 0.9058430790901184, "learning_rate": 1.245e-05, "loss": 0.1512, "step": 4153 }, { "epoch": 7.664819944598338, "grad_norm": 0.6345282196998596, "learning_rate": 1.2453000000000001e-05, "loss": 0.1035, "step": 4154 }, { "epoch": 7.666666666666667, "grad_norm": 2.050560235977173, "learning_rate": 1.2456e-05, "loss": 0.1019, "step": 4155 }, { "epoch": 7.668513388734995, "grad_norm": 0.7613061666488647, "learning_rate": 1.2459e-05, "loss": 0.113, "step": 4156 }, { "epoch": 7.670360110803324, "grad_norm": 1.1732903718948364, "learning_rate": 1.2462e-05, "loss": 0.191, "step": 4157 }, { "epoch": 7.672206832871653, "grad_norm": 0.9498212933540344, "learning_rate": 1.2465e-05, "loss": 0.0967, "step": 4158 }, { "epoch": 7.674053554939982, "grad_norm": 0.49174702167510986, "learning_rate": 1.2468000000000002e-05, "loss": 0.0451, "step": 4159 }, { "epoch": 7.67590027700831, "grad_norm": 0.7871057391166687, "learning_rate": 1.2471000000000001e-05, "loss": 0.0774, "step": 4160 }, { "epoch": 7.677746999076639, "grad_norm": 0.4107961356639862, "learning_rate": 1.2474000000000001e-05, "loss": 0.0401, "step": 4161 }, { "epoch": 7.679593721144967, "grad_norm": 0.8008513450622559, "learning_rate": 1.2477e-05, "loss": 0.0526, "step": 4162 }, { "epoch": 7.6814404432132966, "grad_norm": 0.46558696031570435, "learning_rate": 1.2479999999999999e-05, "loss": 0.0307, "step": 4163 }, { "epoch": 7.683287165281625, "grad_norm": 0.44487065076828003, "learning_rate": 
1.2483e-05, "loss": 0.036, "step": 4164 }, { "epoch": 7.685133887349954, "grad_norm": 0.5548285841941833, "learning_rate": 1.2486e-05, "loss": 0.0441, "step": 4165 }, { "epoch": 7.686980609418282, "grad_norm": 0.5006272196769714, "learning_rate": 1.2489e-05, "loss": 0.0331, "step": 4166 }, { "epoch": 7.688827331486611, "grad_norm": 0.3948770761489868, "learning_rate": 1.2492e-05, "loss": 0.0378, "step": 4167 }, { "epoch": 7.69067405355494, "grad_norm": 0.4485151767730713, "learning_rate": 1.2495e-05, "loss": 0.0309, "step": 4168 }, { "epoch": 7.692520775623269, "grad_norm": 0.5578030347824097, "learning_rate": 1.2498000000000001e-05, "loss": 0.0414, "step": 4169 }, { "epoch": 7.694367497691598, "grad_norm": 0.4920484721660614, "learning_rate": 1.2501000000000001e-05, "loss": 0.0289, "step": 4170 }, { "epoch": 7.696214219759926, "grad_norm": 0.7462189793586731, "learning_rate": 1.2504000000000001e-05, "loss": 0.0612, "step": 4171 }, { "epoch": 7.698060941828254, "grad_norm": 0.4768451452255249, "learning_rate": 1.2507e-05, "loss": 0.0297, "step": 4172 }, { "epoch": 7.6999076638965835, "grad_norm": 0.4360469877719879, "learning_rate": 1.251e-05, "loss": 0.0379, "step": 4173 }, { "epoch": 7.701754385964913, "grad_norm": 0.766214907169342, "learning_rate": 1.2513e-05, "loss": 0.0379, "step": 4174 }, { "epoch": 7.703601108033241, "grad_norm": 1.5813432931900024, "learning_rate": 1.2516e-05, "loss": 0.078, "step": 4175 }, { "epoch": 7.70544783010157, "grad_norm": 0.6080664396286011, "learning_rate": 1.2519e-05, "loss": 0.0409, "step": 4176 }, { "epoch": 7.707294552169898, "grad_norm": 1.1184157133102417, "learning_rate": 1.2522e-05, "loss": 0.0319, "step": 4177 }, { "epoch": 7.7091412742382275, "grad_norm": 0.6684539318084717, "learning_rate": 1.2525e-05, "loss": 0.0299, "step": 4178 }, { "epoch": 7.710987996306556, "grad_norm": 0.6261076331138611, "learning_rate": 1.2528000000000001e-05, "loss": 0.0342, "step": 4179 }, { "epoch": 7.712834718374885, "grad_norm": 
0.5831961631774902, "learning_rate": 1.2531e-05, "loss": 0.0277, "step": 4180 }, { "epoch": 7.714681440443213, "grad_norm": 0.5599172711372375, "learning_rate": 1.2534e-05, "loss": 0.0453, "step": 4181 }, { "epoch": 7.716528162511542, "grad_norm": 0.5989073514938354, "learning_rate": 1.2537e-05, "loss": 0.0219, "step": 4182 }, { "epoch": 7.7183748845798705, "grad_norm": 0.48640766739845276, "learning_rate": 1.254e-05, "loss": 0.028, "step": 4183 }, { "epoch": 7.7202216066482, "grad_norm": 0.7419519424438477, "learning_rate": 1.2543000000000002e-05, "loss": 0.0479, "step": 4184 }, { "epoch": 7.722068328716528, "grad_norm": 0.4289011061191559, "learning_rate": 1.2546000000000002e-05, "loss": 0.0187, "step": 4185 }, { "epoch": 7.723915050784857, "grad_norm": 0.9601662158966064, "learning_rate": 1.2549000000000001e-05, "loss": 0.0284, "step": 4186 }, { "epoch": 7.725761772853185, "grad_norm": 0.7918846607208252, "learning_rate": 1.2552e-05, "loss": 0.0461, "step": 4187 }, { "epoch": 7.7276084949215145, "grad_norm": 0.7785574793815613, "learning_rate": 1.2555e-05, "loss": 0.0362, "step": 4188 }, { "epoch": 7.729455216989843, "grad_norm": 0.8537510633468628, "learning_rate": 1.2558e-05, "loss": 0.0385, "step": 4189 }, { "epoch": 7.731301939058172, "grad_norm": 1.2144277095794678, "learning_rate": 1.2561e-05, "loss": 0.0485, "step": 4190 }, { "epoch": 7.7331486611265, "grad_norm": 1.2213268280029297, "learning_rate": 1.2564e-05, "loss": 0.038, "step": 4191 }, { "epoch": 7.734995383194829, "grad_norm": 0.6018048524856567, "learning_rate": 1.2567e-05, "loss": 0.0365, "step": 4192 }, { "epoch": 7.7368421052631575, "grad_norm": 0.8807831406593323, "learning_rate": 1.257e-05, "loss": 0.0544, "step": 4193 }, { "epoch": 7.738688827331487, "grad_norm": 0.9523292183876038, "learning_rate": 1.2573e-05, "loss": 0.088, "step": 4194 }, { "epoch": 7.740535549399816, "grad_norm": 1.3522166013717651, "learning_rate": 1.2576000000000001e-05, "loss": 0.3798, "step": 4195 }, { "epoch": 
7.742382271468144, "grad_norm": 1.0144202709197998, "learning_rate": 1.2579000000000001e-05, "loss": 0.3224, "step": 4196 }, { "epoch": 7.744228993536472, "grad_norm": 1.2468231916427612, "learning_rate": 1.2582e-05, "loss": 0.2713, "step": 4197 }, { "epoch": 7.7460757156048015, "grad_norm": 1.5040656328201294, "learning_rate": 1.2585e-05, "loss": 0.2684, "step": 4198 }, { "epoch": 7.747922437673131, "grad_norm": 1.2979474067687988, "learning_rate": 1.2587999999999999e-05, "loss": 0.2113, "step": 4199 }, { "epoch": 7.749769159741459, "grad_norm": 1.4345593452453613, "learning_rate": 1.2591e-05, "loss": 0.2385, "step": 4200 }, { "epoch": 7.751615881809788, "grad_norm": 1.5685144662857056, "learning_rate": 1.2594e-05, "loss": 0.2577, "step": 4201 }, { "epoch": 7.753462603878116, "grad_norm": 1.26348078250885, "learning_rate": 1.2597e-05, "loss": 0.1832, "step": 4202 }, { "epoch": 7.755309325946445, "grad_norm": 0.8908654451370239, "learning_rate": 1.26e-05, "loss": 0.1213, "step": 4203 }, { "epoch": 7.757156048014774, "grad_norm": 0.7878689169883728, "learning_rate": 1.2603e-05, "loss": 0.1387, "step": 4204 }, { "epoch": 7.759002770083103, "grad_norm": 0.7852795720100403, "learning_rate": 1.2606000000000001e-05, "loss": 0.147, "step": 4205 }, { "epoch": 7.760849492151431, "grad_norm": 0.8014175295829773, "learning_rate": 1.2609e-05, "loss": 0.1322, "step": 4206 }, { "epoch": 7.76269621421976, "grad_norm": 0.8985889554023743, "learning_rate": 1.2612e-05, "loss": 0.1557, "step": 4207 }, { "epoch": 7.7645429362880884, "grad_norm": 1.881499171257019, "learning_rate": 1.2615e-05, "loss": 0.0711, "step": 4208 }, { "epoch": 7.766389658356418, "grad_norm": 0.6447560787200928, "learning_rate": 1.2618e-05, "loss": 0.0909, "step": 4209 }, { "epoch": 7.768236380424746, "grad_norm": 0.5465224385261536, "learning_rate": 1.2621000000000002e-05, "loss": 0.0681, "step": 4210 }, { "epoch": 7.770083102493075, "grad_norm": 0.6470277905464172, "learning_rate": 1.2624e-05, "loss": 0.0669, 
"step": 4211 }, { "epoch": 7.771929824561403, "grad_norm": 0.58457350730896, "learning_rate": 1.2627e-05, "loss": 0.0585, "step": 4212 }, { "epoch": 7.773776546629732, "grad_norm": 0.6071618795394897, "learning_rate": 1.263e-05, "loss": 0.0395, "step": 4213 }, { "epoch": 7.775623268698061, "grad_norm": 0.676609456539154, "learning_rate": 1.2633e-05, "loss": 0.0525, "step": 4214 }, { "epoch": 7.77746999076639, "grad_norm": 0.3999547064304352, "learning_rate": 1.2636e-05, "loss": 0.037, "step": 4215 }, { "epoch": 7.779316712834718, "grad_norm": 0.5443705916404724, "learning_rate": 1.2639e-05, "loss": 0.0371, "step": 4216 }, { "epoch": 7.781163434903047, "grad_norm": 0.9816798567771912, "learning_rate": 1.2642e-05, "loss": 0.0419, "step": 4217 }, { "epoch": 7.783010156971375, "grad_norm": 0.6923866271972656, "learning_rate": 1.2645e-05, "loss": 0.0465, "step": 4218 }, { "epoch": 7.784856879039705, "grad_norm": 0.5095269083976746, "learning_rate": 1.2648e-05, "loss": 0.0343, "step": 4219 }, { "epoch": 7.786703601108034, "grad_norm": 0.6733057498931885, "learning_rate": 1.2651000000000001e-05, "loss": 0.0409, "step": 4220 }, { "epoch": 7.788550323176362, "grad_norm": 0.8574805855751038, "learning_rate": 1.2654000000000001e-05, "loss": 0.0374, "step": 4221 }, { "epoch": 7.79039704524469, "grad_norm": 0.6730523109436035, "learning_rate": 1.2657000000000001e-05, "loss": 0.0235, "step": 4222 }, { "epoch": 7.792243767313019, "grad_norm": 0.6429433226585388, "learning_rate": 1.2659999999999999e-05, "loss": 0.0508, "step": 4223 }, { "epoch": 7.7940904893813485, "grad_norm": 0.6499189138412476, "learning_rate": 1.2662999999999999e-05, "loss": 0.0388, "step": 4224 }, { "epoch": 7.795937211449677, "grad_norm": 0.8994177579879761, "learning_rate": 1.2666e-05, "loss": 0.0443, "step": 4225 }, { "epoch": 7.797783933518006, "grad_norm": 0.7503787279129028, "learning_rate": 1.2669e-05, "loss": 0.0268, "step": 4226 }, { "epoch": 7.799630655586334, "grad_norm": 0.5890717506408691, 
"learning_rate": 1.2672e-05, "loss": 0.0382, "step": 4227 }, { "epoch": 7.801477377654663, "grad_norm": 0.5628544092178345, "learning_rate": 1.2675e-05, "loss": 0.0344, "step": 4228 }, { "epoch": 7.803324099722992, "grad_norm": 0.4928682744503021, "learning_rate": 1.2678e-05, "loss": 0.0325, "step": 4229 }, { "epoch": 7.805170821791321, "grad_norm": 0.787543535232544, "learning_rate": 1.2681000000000001e-05, "loss": 0.0362, "step": 4230 }, { "epoch": 7.807017543859649, "grad_norm": 1.0132980346679688, "learning_rate": 1.2684000000000001e-05, "loss": 0.0364, "step": 4231 }, { "epoch": 7.808864265927978, "grad_norm": 0.5771153569221497, "learning_rate": 1.2687e-05, "loss": 0.0343, "step": 4232 }, { "epoch": 7.810710987996306, "grad_norm": 0.8791329860687256, "learning_rate": 1.269e-05, "loss": 0.0577, "step": 4233 }, { "epoch": 7.8125577100646355, "grad_norm": 0.7927799820899963, "learning_rate": 1.2693e-05, "loss": 0.0372, "step": 4234 }, { "epoch": 7.814404432132964, "grad_norm": 0.7587766647338867, "learning_rate": 1.2696000000000002e-05, "loss": 0.0323, "step": 4235 }, { "epoch": 7.816251154201293, "grad_norm": 1.1068307161331177, "learning_rate": 1.2699e-05, "loss": 0.0602, "step": 4236 }, { "epoch": 7.818097876269621, "grad_norm": 1.694675326347351, "learning_rate": 1.2702e-05, "loss": 0.0458, "step": 4237 }, { "epoch": 7.81994459833795, "grad_norm": 0.6220648288726807, "learning_rate": 1.2705e-05, "loss": 0.0459, "step": 4238 }, { "epoch": 7.821791320406279, "grad_norm": 0.6234679222106934, "learning_rate": 1.2708e-05, "loss": 0.0373, "step": 4239 }, { "epoch": 7.823638042474608, "grad_norm": 0.6579725742340088, "learning_rate": 1.2711e-05, "loss": 0.0335, "step": 4240 }, { "epoch": 7.825484764542936, "grad_norm": 0.7174988985061646, "learning_rate": 1.2714e-05, "loss": 0.0392, "step": 4241 }, { "epoch": 7.827331486611265, "grad_norm": 0.9258104562759399, "learning_rate": 1.2717e-05, "loss": 0.0305, "step": 4242 }, { "epoch": 7.829178208679593, "grad_norm": 
0.7623401284217834, "learning_rate": 1.272e-05, "loss": 0.0434, "step": 4243 }, { "epoch": 7.8310249307479225, "grad_norm": 0.6249094009399414, "learning_rate": 1.2723e-05, "loss": 0.0507, "step": 4244 }, { "epoch": 7.832871652816252, "grad_norm": 1.315299153327942, "learning_rate": 1.2726000000000001e-05, "loss": 0.343, "step": 4245 }, { "epoch": 7.83471837488458, "grad_norm": 0.8846774101257324, "learning_rate": 1.2729000000000001e-05, "loss": 0.2324, "step": 4246 }, { "epoch": 7.836565096952908, "grad_norm": 0.7561314702033997, "learning_rate": 1.2732000000000001e-05, "loss": 0.2017, "step": 4247 }, { "epoch": 7.838411819021237, "grad_norm": 0.9137879014015198, "learning_rate": 1.2735e-05, "loss": 0.2314, "step": 4248 }, { "epoch": 7.840258541089566, "grad_norm": 0.7995381355285645, "learning_rate": 1.2737999999999999e-05, "loss": 0.2356, "step": 4249 }, { "epoch": 7.842105263157895, "grad_norm": 0.9145019054412842, "learning_rate": 1.2741e-05, "loss": 0.2342, "step": 4250 }, { "epoch": 7.843951985226224, "grad_norm": 0.6350741982460022, "learning_rate": 1.2744e-05, "loss": 0.1745, "step": 4251 }, { "epoch": 7.845798707294552, "grad_norm": 1.0012751817703247, "learning_rate": 1.2747e-05, "loss": 0.1851, "step": 4252 }, { "epoch": 7.847645429362881, "grad_norm": 1.0654654502868652, "learning_rate": 1.275e-05, "loss": 0.197, "step": 4253 }, { "epoch": 7.8494921514312095, "grad_norm": 0.7405928373336792, "learning_rate": 1.2753e-05, "loss": 0.1307, "step": 4254 }, { "epoch": 7.851338873499539, "grad_norm": 1.0535868406295776, "learning_rate": 1.2756000000000001e-05, "loss": 0.1201, "step": 4255 }, { "epoch": 7.853185595567867, "grad_norm": 0.6765692234039307, "learning_rate": 1.2759000000000001e-05, "loss": 0.0829, "step": 4256 }, { "epoch": 7.855032317636196, "grad_norm": 0.9713176488876343, "learning_rate": 1.2762e-05, "loss": 0.1442, "step": 4257 }, { "epoch": 7.856879039704524, "grad_norm": 0.7366648316383362, "learning_rate": 1.2765e-05, "loss": 0.0868, 
"step": 4258 }, { "epoch": 7.858725761772853, "grad_norm": 0.5480320453643799, "learning_rate": 1.2768e-05, "loss": 0.0614, "step": 4259 }, { "epoch": 7.860572483841182, "grad_norm": 0.5844116806983948, "learning_rate": 1.2771e-05, "loss": 0.0615, "step": 4260 }, { "epoch": 7.862419205909511, "grad_norm": 0.6884409785270691, "learning_rate": 1.2774e-05, "loss": 0.0648, "step": 4261 }, { "epoch": 7.864265927977839, "grad_norm": 0.7041671276092529, "learning_rate": 1.2777e-05, "loss": 0.0585, "step": 4262 }, { "epoch": 7.866112650046168, "grad_norm": 1.6187162399291992, "learning_rate": 1.278e-05, "loss": 0.0443, "step": 4263 }, { "epoch": 7.8679593721144965, "grad_norm": 0.5576724410057068, "learning_rate": 1.2783e-05, "loss": 0.0413, "step": 4264 }, { "epoch": 7.869806094182826, "grad_norm": 0.644615113735199, "learning_rate": 1.2786000000000001e-05, "loss": 0.0376, "step": 4265 }, { "epoch": 7.871652816251154, "grad_norm": 0.7323140501976013, "learning_rate": 1.2789e-05, "loss": 0.0435, "step": 4266 }, { "epoch": 7.873499538319483, "grad_norm": 0.9791684150695801, "learning_rate": 1.2792e-05, "loss": 0.0359, "step": 4267 }, { "epoch": 7.875346260387811, "grad_norm": 1.0607141256332397, "learning_rate": 1.2795e-05, "loss": 0.052, "step": 4268 }, { "epoch": 7.87719298245614, "grad_norm": 0.684356153011322, "learning_rate": 1.2798e-05, "loss": 0.0328, "step": 4269 }, { "epoch": 7.87903970452447, "grad_norm": 0.6425195932388306, "learning_rate": 1.2801000000000002e-05, "loss": 0.0327, "step": 4270 }, { "epoch": 7.880886426592798, "grad_norm": 0.7910063862800598, "learning_rate": 1.2804000000000001e-05, "loss": 0.0423, "step": 4271 }, { "epoch": 7.882733148661126, "grad_norm": 0.648790180683136, "learning_rate": 1.2807000000000001e-05, "loss": 0.061, "step": 4272 }, { "epoch": 7.884579870729455, "grad_norm": 0.8567507266998291, "learning_rate": 1.281e-05, "loss": 0.0444, "step": 4273 }, { "epoch": 7.886426592797784, "grad_norm": 0.44043436646461487, "learning_rate": 
1.2812999999999999e-05, "loss": 0.029, "step": 4274 }, { "epoch": 7.888273314866113, "grad_norm": 0.7219623923301697, "learning_rate": 1.2816e-05, "loss": 0.0619, "step": 4275 }, { "epoch": 7.890120036934442, "grad_norm": 0.4809543192386627, "learning_rate": 1.2819e-05, "loss": 0.0396, "step": 4276 }, { "epoch": 7.89196675900277, "grad_norm": 0.48918649554252625, "learning_rate": 1.2822e-05, "loss": 0.0316, "step": 4277 }, { "epoch": 7.893813481071099, "grad_norm": 0.7103901505470276, "learning_rate": 1.2825e-05, "loss": 0.0436, "step": 4278 }, { "epoch": 7.895660203139427, "grad_norm": 1.1335327625274658, "learning_rate": 1.2828e-05, "loss": 0.042, "step": 4279 }, { "epoch": 7.8975069252077565, "grad_norm": 0.5287763476371765, "learning_rate": 1.2831000000000001e-05, "loss": 0.028, "step": 4280 }, { "epoch": 7.899353647276085, "grad_norm": 0.5864327549934387, "learning_rate": 1.2834000000000001e-05, "loss": 0.0476, "step": 4281 }, { "epoch": 7.901200369344414, "grad_norm": 0.7537019848823547, "learning_rate": 1.2837000000000001e-05, "loss": 0.0594, "step": 4282 }, { "epoch": 7.903047091412742, "grad_norm": 0.7733783721923828, "learning_rate": 1.284e-05, "loss": 0.0467, "step": 4283 }, { "epoch": 7.904893813481071, "grad_norm": 0.9843820929527283, "learning_rate": 1.2843e-05, "loss": 0.0616, "step": 4284 }, { "epoch": 7.9067405355494, "grad_norm": 0.5933932065963745, "learning_rate": 1.2846e-05, "loss": 0.0397, "step": 4285 }, { "epoch": 7.908587257617729, "grad_norm": 0.7289738059043884, "learning_rate": 1.2849e-05, "loss": 0.0563, "step": 4286 }, { "epoch": 7.910433979686057, "grad_norm": 0.6274788975715637, "learning_rate": 1.2852e-05, "loss": 0.0424, "step": 4287 }, { "epoch": 7.912280701754386, "grad_norm": 0.8971357941627502, "learning_rate": 1.2855e-05, "loss": 0.0607, "step": 4288 }, { "epoch": 7.914127423822714, "grad_norm": 0.6123123168945312, "learning_rate": 1.2858e-05, "loss": 0.0335, "step": 4289 }, { "epoch": 7.9159741458910435, "grad_norm": 
0.6328318119049072, "learning_rate": 1.2861000000000001e-05, "loss": 0.0378, "step": 4290 }, { "epoch": 7.917820867959372, "grad_norm": 0.6868095993995667, "learning_rate": 1.2864e-05, "loss": 0.0554, "step": 4291 }, { "epoch": 7.919667590027701, "grad_norm": 0.9725096225738525, "learning_rate": 1.2867e-05, "loss": 0.0695, "step": 4292 }, { "epoch": 7.921514312096029, "grad_norm": 0.8192182183265686, "learning_rate": 1.287e-05, "loss": 0.0351, "step": 4293 }, { "epoch": 7.923361034164358, "grad_norm": 0.8341385722160339, "learning_rate": 1.2873e-05, "loss": 0.0802, "step": 4294 }, { "epoch": 7.9252077562326875, "grad_norm": 1.3568755388259888, "learning_rate": 1.2876000000000002e-05, "loss": 0.4823, "step": 4295 }, { "epoch": 7.927054478301016, "grad_norm": 1.1360949277877808, "learning_rate": 1.2879000000000002e-05, "loss": 0.3611, "step": 4296 }, { "epoch": 7.928901200369344, "grad_norm": 0.790391743183136, "learning_rate": 1.2882e-05, "loss": 0.1874, "step": 4297 }, { "epoch": 7.930747922437673, "grad_norm": 0.743624746799469, "learning_rate": 1.2885e-05, "loss": 0.172, "step": 4298 }, { "epoch": 7.932594644506002, "grad_norm": 0.6538652181625366, "learning_rate": 1.2888e-05, "loss": 0.1649, "step": 4299 }, { "epoch": 7.9344413665743305, "grad_norm": 0.8588427901268005, "learning_rate": 1.2891e-05, "loss": 0.1684, "step": 4300 }, { "epoch": 7.93628808864266, "grad_norm": 0.6219055652618408, "learning_rate": 1.2894e-05, "loss": 0.1309, "step": 4301 }, { "epoch": 7.938134810710988, "grad_norm": 0.6097257137298584, "learning_rate": 1.2897e-05, "loss": 0.1437, "step": 4302 }, { "epoch": 7.939981532779317, "grad_norm": 0.6845511794090271, "learning_rate": 1.29e-05, "loss": 0.148, "step": 4303 }, { "epoch": 7.941828254847645, "grad_norm": 0.8553453087806702, "learning_rate": 1.2903e-05, "loss": 0.1539, "step": 4304 }, { "epoch": 7.9436749769159745, "grad_norm": 0.8057902455329895, "learning_rate": 1.2906000000000001e-05, "loss": 0.1407, "step": 4305 }, { "epoch": 
7.945521698984303, "grad_norm": 0.7886427640914917, "learning_rate": 1.2909000000000001e-05, "loss": 0.1285, "step": 4306 }, { "epoch": 7.947368421052632, "grad_norm": 0.5424556732177734, "learning_rate": 1.2912000000000001e-05, "loss": 0.0449, "step": 4307 }, { "epoch": 7.94921514312096, "grad_norm": 0.8618884086608887, "learning_rate": 1.2915000000000001e-05, "loss": 0.0979, "step": 4308 }, { "epoch": 7.951061865189289, "grad_norm": 0.5419620275497437, "learning_rate": 1.2917999999999999e-05, "loss": 0.0577, "step": 4309 }, { "epoch": 7.9529085872576175, "grad_norm": 0.9529350996017456, "learning_rate": 1.2921e-05, "loss": 0.0946, "step": 4310 }, { "epoch": 7.954755309325947, "grad_norm": 0.6171501278877258, "learning_rate": 1.2924e-05, "loss": 0.055, "step": 4311 }, { "epoch": 7.956602031394275, "grad_norm": 0.7650995850563049, "learning_rate": 1.2927e-05, "loss": 0.0598, "step": 4312 }, { "epoch": 7.958448753462604, "grad_norm": 0.5648701190948486, "learning_rate": 1.293e-05, "loss": 0.0758, "step": 4313 }, { "epoch": 7.960295475530932, "grad_norm": 1.0188759565353394, "learning_rate": 1.2933e-05, "loss": 0.0477, "step": 4314 }, { "epoch": 7.9621421975992615, "grad_norm": 0.4948999583721161, "learning_rate": 1.2936000000000001e-05, "loss": 0.0379, "step": 4315 }, { "epoch": 7.96398891966759, "grad_norm": 0.6018174290657043, "learning_rate": 1.2939000000000001e-05, "loss": 0.0388, "step": 4316 }, { "epoch": 7.965835641735919, "grad_norm": 1.477062463760376, "learning_rate": 1.2942e-05, "loss": 0.0588, "step": 4317 }, { "epoch": 7.967682363804247, "grad_norm": 0.666613757610321, "learning_rate": 1.2945e-05, "loss": 0.044, "step": 4318 }, { "epoch": 7.969529085872576, "grad_norm": 0.6991389989852905, "learning_rate": 1.2948e-05, "loss": 0.0346, "step": 4319 }, { "epoch": 7.971375807940905, "grad_norm": 0.8860199451446533, "learning_rate": 1.2951e-05, "loss": 0.0211, "step": 4320 }, { "epoch": 7.973222530009234, "grad_norm": 1.3285048007965088, "learning_rate": 
1.2954000000000002e-05, "loss": 0.0375, "step": 4321 }, { "epoch": 7.975069252077562, "grad_norm": 0.5883099436759949, "learning_rate": 1.2957e-05, "loss": 0.034, "step": 4322 }, { "epoch": 7.976915974145891, "grad_norm": 0.7883331179618835, "learning_rate": 1.296e-05, "loss": 0.0376, "step": 4323 }, { "epoch": 7.97876269621422, "grad_norm": 1.2550441026687622, "learning_rate": 1.2963e-05, "loss": 0.0627, "step": 4324 }, { "epoch": 7.980609418282548, "grad_norm": 0.7064722180366516, "learning_rate": 1.2966e-05, "loss": 0.0626, "step": 4325 }, { "epoch": 7.982456140350877, "grad_norm": 0.9073736071586609, "learning_rate": 1.2969e-05, "loss": 0.066, "step": 4326 }, { "epoch": 7.984302862419206, "grad_norm": 0.7056010961532593, "learning_rate": 1.2972e-05, "loss": 0.0372, "step": 4327 }, { "epoch": 7.986149584487535, "grad_norm": 0.6290423274040222, "learning_rate": 1.2975e-05, "loss": 0.0393, "step": 4328 }, { "epoch": 7.987996306555863, "grad_norm": 1.1520283222198486, "learning_rate": 1.2978e-05, "loss": 0.0428, "step": 4329 }, { "epoch": 7.989843028624192, "grad_norm": 0.9760458469390869, "learning_rate": 1.2981e-05, "loss": 0.0663, "step": 4330 }, { "epoch": 7.991689750692521, "grad_norm": 0.6979522109031677, "learning_rate": 1.2984000000000001e-05, "loss": 0.0335, "step": 4331 }, { "epoch": 7.99353647276085, "grad_norm": 0.6798151731491089, "learning_rate": 1.2987000000000001e-05, "loss": 0.0329, "step": 4332 }, { "epoch": 7.995383194829178, "grad_norm": 0.6956556439399719, "learning_rate": 1.2990000000000001e-05, "loss": 0.0393, "step": 4333 }, { "epoch": 7.997229916897507, "grad_norm": 0.7906112670898438, "learning_rate": 1.2992999999999999e-05, "loss": 0.0387, "step": 4334 }, { "epoch": 7.999076638965835, "grad_norm": 1.0580320358276367, "learning_rate": 1.2995999999999999e-05, "loss": 0.0529, "step": 4335 }, { "epoch": 8.0, "grad_norm": 0.4188879132270813, "learning_rate": 1.2999e-05, "loss": 0.0076, "step": 4336 }, { "epoch": 8.00184672206833, "grad_norm": 
1.5326043367385864, "learning_rate": 1.3002e-05, "loss": 0.3817, "step": 4337 }, { "epoch": 8.003693444136658, "grad_norm": 1.1512473821640015, "learning_rate": 1.3005e-05, "loss": 0.3305, "step": 4338 }, { "epoch": 8.005540166204986, "grad_norm": 0.9963605999946594, "learning_rate": 1.3008e-05, "loss": 0.26, "step": 4339 }, { "epoch": 8.007386888273315, "grad_norm": 0.8791415691375732, "learning_rate": 1.3011e-05, "loss": 0.1898, "step": 4340 }, { "epoch": 8.009233610341644, "grad_norm": 1.1535075902938843, "learning_rate": 1.3014000000000001e-05, "loss": 0.2692, "step": 4341 }, { "epoch": 8.011080332409973, "grad_norm": 0.983793318271637, "learning_rate": 1.3017000000000001e-05, "loss": 0.2331, "step": 4342 }, { "epoch": 8.0129270544783, "grad_norm": 0.7149614095687866, "learning_rate": 1.302e-05, "loss": 0.183, "step": 4343 }, { "epoch": 8.01477377654663, "grad_norm": 0.6314206719398499, "learning_rate": 1.3023e-05, "loss": 0.1293, "step": 4344 }, { "epoch": 8.016620498614959, "grad_norm": 0.8021345734596252, "learning_rate": 1.3026e-05, "loss": 0.136, "step": 4345 }, { "epoch": 8.018467220683288, "grad_norm": 0.5998855233192444, "learning_rate": 1.3029e-05, "loss": 0.0968, "step": 4346 }, { "epoch": 8.020313942751615, "grad_norm": 0.6993886232376099, "learning_rate": 1.3032e-05, "loss": 0.1002, "step": 4347 }, { "epoch": 8.022160664819944, "grad_norm": 0.9774983525276184, "learning_rate": 1.3035e-05, "loss": 0.1381, "step": 4348 }, { "epoch": 8.024007386888274, "grad_norm": 0.7680040001869202, "learning_rate": 1.3038e-05, "loss": 0.0921, "step": 4349 }, { "epoch": 8.025854108956603, "grad_norm": 0.9355478882789612, "learning_rate": 1.3041e-05, "loss": 0.1065, "step": 4350 }, { "epoch": 8.02770083102493, "grad_norm": 0.7027696967124939, "learning_rate": 1.3044e-05, "loss": 0.0703, "step": 4351 }, { "epoch": 8.02954755309326, "grad_norm": 1.3707549571990967, "learning_rate": 1.3047e-05, "loss": 0.1136, "step": 4352 }, { "epoch": 8.031394275161588, "grad_norm": 
0.7237825393676758, "learning_rate": 1.305e-05, "loss": 0.082, "step": 4353 }, { "epoch": 8.033240997229917, "grad_norm": 0.6745184659957886, "learning_rate": 1.3053e-05, "loss": 0.054, "step": 4354 }, { "epoch": 8.035087719298245, "grad_norm": 0.8653813600540161, "learning_rate": 1.3056e-05, "loss": 0.0645, "step": 4355 }, { "epoch": 8.036934441366574, "grad_norm": 0.7573667764663696, "learning_rate": 1.3059000000000002e-05, "loss": 0.0388, "step": 4356 }, { "epoch": 8.038781163434903, "grad_norm": 0.4688849151134491, "learning_rate": 1.3062000000000001e-05, "loss": 0.0368, "step": 4357 }, { "epoch": 8.040627885503232, "grad_norm": 0.8460974097251892, "learning_rate": 1.3065000000000001e-05, "loss": 0.051, "step": 4358 }, { "epoch": 8.04247460757156, "grad_norm": 0.6380207538604736, "learning_rate": 1.3068e-05, "loss": 0.0557, "step": 4359 }, { "epoch": 8.044321329639889, "grad_norm": 0.3948462903499603, "learning_rate": 1.3070999999999999e-05, "loss": 0.022, "step": 4360 }, { "epoch": 8.046168051708218, "grad_norm": 0.6261285543441772, "learning_rate": 1.3074e-05, "loss": 0.0437, "step": 4361 }, { "epoch": 8.048014773776547, "grad_norm": 0.6887944936752319, "learning_rate": 1.3077e-05, "loss": 0.0436, "step": 4362 }, { "epoch": 8.049861495844876, "grad_norm": 0.49890008568763733, "learning_rate": 1.308e-05, "loss": 0.0348, "step": 4363 }, { "epoch": 8.051708217913204, "grad_norm": 0.5046862363815308, "learning_rate": 1.3083e-05, "loss": 0.0288, "step": 4364 }, { "epoch": 8.053554939981533, "grad_norm": 0.6013908386230469, "learning_rate": 1.3086e-05, "loss": 0.0362, "step": 4365 }, { "epoch": 8.055401662049862, "grad_norm": 1.3356207609176636, "learning_rate": 1.3089000000000001e-05, "loss": 0.0907, "step": 4366 }, { "epoch": 8.057248384118191, "grad_norm": 0.937406599521637, "learning_rate": 1.3092000000000001e-05, "loss": 0.0265, "step": 4367 }, { "epoch": 8.059095106186518, "grad_norm": 0.48784348368644714, "learning_rate": 1.3095e-05, "loss": 0.0651, "step": 
4368 }, { "epoch": 8.060941828254848, "grad_norm": 0.6230420470237732, "learning_rate": 1.3098e-05, "loss": 0.0523, "step": 4369 }, { "epoch": 8.062788550323177, "grad_norm": 0.4827072024345398, "learning_rate": 1.3101e-05, "loss": 0.023, "step": 4370 }, { "epoch": 8.064635272391506, "grad_norm": 0.40178000926971436, "learning_rate": 1.3104e-05, "loss": 0.0281, "step": 4371 }, { "epoch": 8.066481994459833, "grad_norm": 0.5730307102203369, "learning_rate": 1.3107e-05, "loss": 0.0431, "step": 4372 }, { "epoch": 8.068328716528162, "grad_norm": 0.5679459571838379, "learning_rate": 1.311e-05, "loss": 0.0207, "step": 4373 }, { "epoch": 8.070175438596491, "grad_norm": 0.4760940372943878, "learning_rate": 1.3113e-05, "loss": 0.0386, "step": 4374 }, { "epoch": 8.07202216066482, "grad_norm": 0.8767454028129578, "learning_rate": 1.3116e-05, "loss": 0.0333, "step": 4375 }, { "epoch": 8.073868882733148, "grad_norm": 1.0410170555114746, "learning_rate": 1.3119000000000001e-05, "loss": 0.0701, "step": 4376 }, { "epoch": 8.075715604801477, "grad_norm": 0.7946456670761108, "learning_rate": 1.3122e-05, "loss": 0.0537, "step": 4377 }, { "epoch": 8.077562326869806, "grad_norm": 0.7042535543441772, "learning_rate": 1.3125e-05, "loss": 0.0382, "step": 4378 }, { "epoch": 8.079409048938135, "grad_norm": 0.6931938529014587, "learning_rate": 1.3128e-05, "loss": 0.0422, "step": 4379 }, { "epoch": 8.081255771006463, "grad_norm": 0.4324564039707184, "learning_rate": 1.3131e-05, "loss": 0.0203, "step": 4380 }, { "epoch": 8.083102493074792, "grad_norm": 0.7083581686019897, "learning_rate": 1.3134000000000002e-05, "loss": 0.0372, "step": 4381 }, { "epoch": 8.084949215143121, "grad_norm": 0.7215295433998108, "learning_rate": 1.3137000000000001e-05, "loss": 0.0263, "step": 4382 }, { "epoch": 8.08679593721145, "grad_norm": 0.36402127146720886, "learning_rate": 1.314e-05, "loss": 0.0205, "step": 4383 }, { "epoch": 8.088642659279778, "grad_norm": 0.4735167920589447, "learning_rate": 1.3143e-05, 
"loss": 0.0366, "step": 4384 }, { "epoch": 8.090489381348107, "grad_norm": 0.7522518038749695, "learning_rate": 1.3146e-05, "loss": 0.0374, "step": 4385 }, { "epoch": 8.092336103416436, "grad_norm": 2.186006784439087, "learning_rate": 1.3149e-05, "loss": 0.0495, "step": 4386 }, { "epoch": 8.094182825484765, "grad_norm": 1.0902612209320068, "learning_rate": 1.3152e-05, "loss": 0.284, "step": 4387 }, { "epoch": 8.096029547553094, "grad_norm": 0.8116087913513184, "learning_rate": 1.3155e-05, "loss": 0.2475, "step": 4388 }, { "epoch": 8.097876269621421, "grad_norm": 0.6619287133216858, "learning_rate": 1.3158e-05, "loss": 0.2024, "step": 4389 }, { "epoch": 8.09972299168975, "grad_norm": 0.7266683578491211, "learning_rate": 1.3161e-05, "loss": 0.1673, "step": 4390 }, { "epoch": 8.10156971375808, "grad_norm": 1.3175115585327148, "learning_rate": 1.3164000000000001e-05, "loss": 0.161, "step": 4391 }, { "epoch": 8.103416435826409, "grad_norm": 1.0672887563705444, "learning_rate": 1.3167000000000001e-05, "loss": 0.2241, "step": 4392 }, { "epoch": 8.105263157894736, "grad_norm": 0.9696524143218994, "learning_rate": 1.3170000000000001e-05, "loss": 0.1349, "step": 4393 }, { "epoch": 8.107109879963065, "grad_norm": 0.6774873733520508, "learning_rate": 1.3173e-05, "loss": 0.1617, "step": 4394 }, { "epoch": 8.108956602031395, "grad_norm": 1.2551002502441406, "learning_rate": 1.3175999999999999e-05, "loss": 0.2438, "step": 4395 }, { "epoch": 8.110803324099724, "grad_norm": 0.8130558133125305, "learning_rate": 1.3179e-05, "loss": 0.136, "step": 4396 }, { "epoch": 8.112650046168051, "grad_norm": 0.7603598833084106, "learning_rate": 1.3182e-05, "loss": 0.1235, "step": 4397 }, { "epoch": 8.11449676823638, "grad_norm": 0.7122944593429565, "learning_rate": 1.3185e-05, "loss": 0.1325, "step": 4398 }, { "epoch": 8.11634349030471, "grad_norm": 0.7684652805328369, "learning_rate": 1.3188e-05, "loss": 0.1338, "step": 4399 }, { "epoch": 8.118190212373039, "grad_norm": 0.7754552960395813, 
"learning_rate": 1.3191e-05, "loss": 0.1212, "step": 4400 }, { "epoch": 8.120036934441366, "grad_norm": 0.6043549180030823, "learning_rate": 1.3194000000000001e-05, "loss": 0.0441, "step": 4401 }, { "epoch": 8.121883656509695, "grad_norm": 0.5200878977775574, "learning_rate": 1.3197000000000001e-05, "loss": 0.0658, "step": 4402 }, { "epoch": 8.123730378578024, "grad_norm": 0.8166391253471375, "learning_rate": 1.32e-05, "loss": 0.0747, "step": 4403 }, { "epoch": 8.125577100646353, "grad_norm": 0.5633155107498169, "learning_rate": 1.3203e-05, "loss": 0.0467, "step": 4404 }, { "epoch": 8.12742382271468, "grad_norm": 0.66031813621521, "learning_rate": 1.3206e-05, "loss": 0.0333, "step": 4405 }, { "epoch": 8.12927054478301, "grad_norm": 1.0485261678695679, "learning_rate": 1.3209000000000002e-05, "loss": 0.0641, "step": 4406 }, { "epoch": 8.131117266851339, "grad_norm": 1.0621566772460938, "learning_rate": 1.3212000000000002e-05, "loss": 0.0357, "step": 4407 }, { "epoch": 8.132963988919668, "grad_norm": 0.6026491522789001, "learning_rate": 1.3215e-05, "loss": 0.0408, "step": 4408 }, { "epoch": 8.134810710987995, "grad_norm": 0.8135718703269958, "learning_rate": 1.3218e-05, "loss": 0.0391, "step": 4409 }, { "epoch": 8.136657433056325, "grad_norm": 0.5995201468467712, "learning_rate": 1.3221e-05, "loss": 0.0317, "step": 4410 }, { "epoch": 8.138504155124654, "grad_norm": 0.5987685322761536, "learning_rate": 1.3224e-05, "loss": 0.0354, "step": 4411 }, { "epoch": 8.140350877192983, "grad_norm": 0.5825679302215576, "learning_rate": 1.3227e-05, "loss": 0.0334, "step": 4412 }, { "epoch": 8.142197599261312, "grad_norm": 0.8200697898864746, "learning_rate": 1.323e-05, "loss": 0.0384, "step": 4413 }, { "epoch": 8.14404432132964, "grad_norm": 0.46164023876190186, "learning_rate": 1.3233e-05, "loss": 0.0259, "step": 4414 }, { "epoch": 8.145891043397969, "grad_norm": 0.7400566935539246, "learning_rate": 1.3236e-05, "loss": 0.0422, "step": 4415 }, { "epoch": 8.147737765466298, 
"grad_norm": 0.600922703742981, "learning_rate": 1.3239000000000001e-05, "loss": 0.0329, "step": 4416 }, { "epoch": 8.149584487534627, "grad_norm": 0.5295440554618835, "learning_rate": 1.3242000000000001e-05, "loss": 0.0308, "step": 4417 }, { "epoch": 8.151431209602954, "grad_norm": 0.8074960112571716, "learning_rate": 1.3245000000000001e-05, "loss": 0.0493, "step": 4418 }, { "epoch": 8.153277931671283, "grad_norm": 0.8136438131332397, "learning_rate": 1.3248000000000001e-05, "loss": 0.0399, "step": 4419 }, { "epoch": 8.155124653739612, "grad_norm": 0.4094987213611603, "learning_rate": 1.3250999999999999e-05, "loss": 0.031, "step": 4420 }, { "epoch": 8.156971375807942, "grad_norm": 0.5699203014373779, "learning_rate": 1.3254e-05, "loss": 0.0375, "step": 4421 }, { "epoch": 8.158818097876269, "grad_norm": 2.046877861022949, "learning_rate": 1.3257e-05, "loss": 0.0254, "step": 4422 }, { "epoch": 8.160664819944598, "grad_norm": 0.6146877408027649, "learning_rate": 1.326e-05, "loss": 0.0315, "step": 4423 }, { "epoch": 8.162511542012927, "grad_norm": 0.7275285124778748, "learning_rate": 1.3263e-05, "loss": 0.0254, "step": 4424 }, { "epoch": 8.164358264081256, "grad_norm": 0.54784095287323, "learning_rate": 1.3266e-05, "loss": 0.0245, "step": 4425 }, { "epoch": 8.166204986149584, "grad_norm": 0.7300387024879456, "learning_rate": 1.3269000000000001e-05, "loss": 0.0336, "step": 4426 }, { "epoch": 8.168051708217913, "grad_norm": 0.9745352864265442, "learning_rate": 1.3272000000000001e-05, "loss": 0.0493, "step": 4427 }, { "epoch": 8.169898430286242, "grad_norm": 1.2098708152770996, "learning_rate": 1.3275e-05, "loss": 0.0445, "step": 4428 }, { "epoch": 8.171745152354571, "grad_norm": 0.39160364866256714, "learning_rate": 1.3278e-05, "loss": 0.0188, "step": 4429 }, { "epoch": 8.173591874422899, "grad_norm": 0.7158820033073425, "learning_rate": 1.3281e-05, "loss": 0.0272, "step": 4430 }, { "epoch": 8.175438596491228, "grad_norm": 0.6653376817703247, "learning_rate": 
1.3284000000000002e-05, "loss": 0.0292, "step": 4431 }, { "epoch": 8.177285318559557, "grad_norm": 0.5458754301071167, "learning_rate": 1.3287e-05, "loss": 0.0275, "step": 4432 }, { "epoch": 8.179132040627886, "grad_norm": 1.1866546869277954, "learning_rate": 1.329e-05, "loss": 0.0475, "step": 4433 }, { "epoch": 8.180978762696213, "grad_norm": 0.9386422634124756, "learning_rate": 1.3293e-05, "loss": 0.0426, "step": 4434 }, { "epoch": 8.182825484764543, "grad_norm": 1.0976414680480957, "learning_rate": 1.3296e-05, "loss": 0.0386, "step": 4435 }, { "epoch": 8.184672206832872, "grad_norm": 0.8207219243049622, "learning_rate": 1.3299000000000001e-05, "loss": 0.0349, "step": 4436 }, { "epoch": 8.1865189289012, "grad_norm": 1.59428870677948, "learning_rate": 1.3302e-05, "loss": 0.3254, "step": 4437 }, { "epoch": 8.18836565096953, "grad_norm": 1.0320184230804443, "learning_rate": 1.3305e-05, "loss": 0.2136, "step": 4438 }, { "epoch": 8.190212373037857, "grad_norm": 0.8654654622077942, "learning_rate": 1.3308e-05, "loss": 0.1964, "step": 4439 }, { "epoch": 8.192059095106186, "grad_norm": 0.7566434741020203, "learning_rate": 1.3311e-05, "loss": 0.2189, "step": 4440 }, { "epoch": 8.193905817174516, "grad_norm": 0.9181260466575623, "learning_rate": 1.3314e-05, "loss": 0.269, "step": 4441 }, { "epoch": 8.195752539242845, "grad_norm": 0.7232855558395386, "learning_rate": 1.3317000000000001e-05, "loss": 0.1816, "step": 4442 }, { "epoch": 8.197599261311172, "grad_norm": 0.8786792159080505, "learning_rate": 1.3320000000000001e-05, "loss": 0.1963, "step": 4443 }, { "epoch": 8.199445983379501, "grad_norm": 0.8025330901145935, "learning_rate": 1.3323000000000001e-05, "loss": 0.1538, "step": 4444 }, { "epoch": 8.20129270544783, "grad_norm": 1.081365704536438, "learning_rate": 1.3325999999999999e-05, "loss": 0.1918, "step": 4445 }, { "epoch": 8.20313942751616, "grad_norm": 0.6874704360961914, "learning_rate": 1.3328999999999999e-05, "loss": 0.0882, "step": 4446 }, { "epoch": 
8.204986149584487, "grad_norm": 0.6587651968002319, "learning_rate": 1.3332e-05, "loss": 0.1124, "step": 4447 }, { "epoch": 8.206832871652816, "grad_norm": 0.6518075466156006, "learning_rate": 1.3335e-05, "loss": 0.1039, "step": 4448 }, { "epoch": 8.208679593721145, "grad_norm": 0.8533167243003845, "learning_rate": 1.3338e-05, "loss": 0.1805, "step": 4449 }, { "epoch": 8.210526315789474, "grad_norm": 0.5440764427185059, "learning_rate": 1.3341e-05, "loss": 0.0368, "step": 4450 }, { "epoch": 8.212373037857802, "grad_norm": 0.7944900393486023, "learning_rate": 1.3344e-05, "loss": 0.1022, "step": 4451 }, { "epoch": 8.21421975992613, "grad_norm": 0.55717933177948, "learning_rate": 1.3347000000000001e-05, "loss": 0.0471, "step": 4452 }, { "epoch": 8.21606648199446, "grad_norm": 0.6322590112686157, "learning_rate": 1.3350000000000001e-05, "loss": 0.0747, "step": 4453 }, { "epoch": 8.21791320406279, "grad_norm": 0.7431051135063171, "learning_rate": 1.3353e-05, "loss": 0.0504, "step": 4454 }, { "epoch": 8.219759926131117, "grad_norm": 0.4595365524291992, "learning_rate": 1.3356e-05, "loss": 0.0277, "step": 4455 }, { "epoch": 8.221606648199446, "grad_norm": 0.5381754040718079, "learning_rate": 1.3359e-05, "loss": 0.0631, "step": 4456 }, { "epoch": 8.223453370267775, "grad_norm": 0.5509288311004639, "learning_rate": 1.3362e-05, "loss": 0.0244, "step": 4457 }, { "epoch": 8.225300092336104, "grad_norm": 0.733174204826355, "learning_rate": 1.3365e-05, "loss": 0.0437, "step": 4458 }, { "epoch": 8.227146814404431, "grad_norm": 0.7149167656898499, "learning_rate": 1.3368e-05, "loss": 0.0451, "step": 4459 }, { "epoch": 8.22899353647276, "grad_norm": 0.6295753121376038, "learning_rate": 1.3371e-05, "loss": 0.0375, "step": 4460 }, { "epoch": 8.23084025854109, "grad_norm": 0.4743589460849762, "learning_rate": 1.3374e-05, "loss": 0.0344, "step": 4461 }, { "epoch": 8.232686980609419, "grad_norm": 0.7781702280044556, "learning_rate": 1.3377e-05, "loss": 0.0464, "step": 4462 }, { "epoch": 
8.234533702677748, "grad_norm": 0.48906221985816956, "learning_rate": 1.338e-05, "loss": 0.0272, "step": 4463 }, { "epoch": 8.236380424746075, "grad_norm": 0.4372539818286896, "learning_rate": 1.3383e-05, "loss": 0.0324, "step": 4464 }, { "epoch": 8.238227146814404, "grad_norm": 0.47455981373786926, "learning_rate": 1.3386e-05, "loss": 0.0229, "step": 4465 }, { "epoch": 8.240073868882734, "grad_norm": 0.5755661725997925, "learning_rate": 1.3389e-05, "loss": 0.0272, "step": 4466 }, { "epoch": 8.241920590951063, "grad_norm": 0.5248891115188599, "learning_rate": 1.3392000000000002e-05, "loss": 0.0295, "step": 4467 }, { "epoch": 8.24376731301939, "grad_norm": 0.47399458289146423, "learning_rate": 1.3395000000000001e-05, "loss": 0.0272, "step": 4468 }, { "epoch": 8.24561403508772, "grad_norm": 0.6420528292655945, "learning_rate": 1.3398e-05, "loss": 0.0317, "step": 4469 }, { "epoch": 8.247460757156048, "grad_norm": 0.5973543524742126, "learning_rate": 1.3401e-05, "loss": 0.028, "step": 4470 }, { "epoch": 8.249307479224377, "grad_norm": 2.3570241928100586, "learning_rate": 1.3403999999999999e-05, "loss": 0.04, "step": 4471 }, { "epoch": 8.251154201292705, "grad_norm": 1.1083556413650513, "learning_rate": 1.3407e-05, "loss": 0.0303, "step": 4472 }, { "epoch": 8.253000923361034, "grad_norm": 1.0114208459854126, "learning_rate": 1.341e-05, "loss": 0.0331, "step": 4473 }, { "epoch": 8.254847645429363, "grad_norm": 0.3611949682235718, "learning_rate": 1.3413e-05, "loss": 0.0149, "step": 4474 }, { "epoch": 8.256694367497692, "grad_norm": 0.7732037901878357, "learning_rate": 1.3416e-05, "loss": 0.044, "step": 4475 }, { "epoch": 8.25854108956602, "grad_norm": 0.5984740257263184, "learning_rate": 1.3419e-05, "loss": 0.0373, "step": 4476 }, { "epoch": 8.260387811634349, "grad_norm": 0.6929849982261658, "learning_rate": 1.3422000000000001e-05, "loss": 0.0363, "step": 4477 }, { "epoch": 8.262234533702678, "grad_norm": 0.8012136816978455, "learning_rate": 1.3425000000000001e-05, 
"loss": 0.0339, "step": 4478 }, { "epoch": 8.264081255771007, "grad_norm": 1.2871159315109253, "learning_rate": 1.3428000000000001e-05, "loss": 0.0359, "step": 4479 }, { "epoch": 8.265927977839334, "grad_norm": 0.7725412845611572, "learning_rate": 1.3431e-05, "loss": 0.0367, "step": 4480 }, { "epoch": 8.267774699907664, "grad_norm": 0.8588116765022278, "learning_rate": 1.3433999999999999e-05, "loss": 0.0357, "step": 4481 }, { "epoch": 8.269621421975993, "grad_norm": 0.3712838888168335, "learning_rate": 1.3437e-05, "loss": 0.0265, "step": 4482 }, { "epoch": 8.271468144044322, "grad_norm": 0.7679174542427063, "learning_rate": 1.344e-05, "loss": 0.0434, "step": 4483 }, { "epoch": 8.27331486611265, "grad_norm": 0.5691342949867249, "learning_rate": 1.3443e-05, "loss": 0.0313, "step": 4484 }, { "epoch": 8.275161588180978, "grad_norm": 0.9302529692649841, "learning_rate": 1.3446e-05, "loss": 0.0326, "step": 4485 }, { "epoch": 8.277008310249307, "grad_norm": 0.6226106286048889, "learning_rate": 1.3449e-05, "loss": 0.0753, "step": 4486 }, { "epoch": 8.278855032317637, "grad_norm": 1.1870638132095337, "learning_rate": 1.3452000000000001e-05, "loss": 0.3242, "step": 4487 }, { "epoch": 8.280701754385966, "grad_norm": 1.0860295295715332, "learning_rate": 1.3455e-05, "loss": 0.3255, "step": 4488 }, { "epoch": 8.282548476454293, "grad_norm": 1.1221368312835693, "learning_rate": 1.3458e-05, "loss": 0.2718, "step": 4489 }, { "epoch": 8.284395198522622, "grad_norm": 0.7170649170875549, "learning_rate": 1.3461e-05, "loss": 0.1995, "step": 4490 }, { "epoch": 8.286241920590951, "grad_norm": 0.837647020816803, "learning_rate": 1.3464e-05, "loss": 0.2252, "step": 4491 }, { "epoch": 8.28808864265928, "grad_norm": 0.7650190591812134, "learning_rate": 1.3467000000000002e-05, "loss": 0.1505, "step": 4492 }, { "epoch": 8.289935364727608, "grad_norm": 1.1400846242904663, "learning_rate": 1.3470000000000001e-05, "loss": 0.1871, "step": 4493 }, { "epoch": 8.291782086795937, "grad_norm": 
0.7779901623725891, "learning_rate": 1.3473e-05, "loss": 0.1292, "step": 4494 }, { "epoch": 8.293628808864266, "grad_norm": 0.8995106220245361, "learning_rate": 1.3476e-05, "loss": 0.1429, "step": 4495 }, { "epoch": 8.295475530932595, "grad_norm": 1.1236376762390137, "learning_rate": 1.3479e-05, "loss": 0.092, "step": 4496 }, { "epoch": 8.297322253000923, "grad_norm": 1.010858178138733, "learning_rate": 1.3482e-05, "loss": 0.1395, "step": 4497 }, { "epoch": 8.299168975069252, "grad_norm": 0.7352631092071533, "learning_rate": 1.3485e-05, "loss": 0.0927, "step": 4498 }, { "epoch": 8.301015697137581, "grad_norm": 0.6601068377494812, "learning_rate": 1.3488e-05, "loss": 0.0438, "step": 4499 }, { "epoch": 8.30286241920591, "grad_norm": 0.5581169724464417, "learning_rate": 1.3491e-05, "loss": 0.0904, "step": 4500 }, { "epoch": 8.304709141274238, "grad_norm": 0.9757201671600342, "learning_rate": 1.3494e-05, "loss": 0.0447, "step": 4501 }, { "epoch": 8.306555863342567, "grad_norm": 0.38367536664009094, "learning_rate": 1.3497000000000001e-05, "loss": 0.0359, "step": 4502 }, { "epoch": 8.308402585410896, "grad_norm": 0.3730805516242981, "learning_rate": 1.3500000000000001e-05, "loss": 0.0398, "step": 4503 }, { "epoch": 8.310249307479225, "grad_norm": 0.6186391115188599, "learning_rate": 1.3503000000000001e-05, "loss": 0.0255, "step": 4504 }, { "epoch": 8.312096029547552, "grad_norm": 0.5357188582420349, "learning_rate": 1.3506e-05, "loss": 0.0438, "step": 4505 }, { "epoch": 8.313942751615881, "grad_norm": 0.9559025764465332, "learning_rate": 1.3508999999999999e-05, "loss": 0.0319, "step": 4506 }, { "epoch": 8.31578947368421, "grad_norm": 0.9470544457435608, "learning_rate": 1.3512e-05, "loss": 0.087, "step": 4507 }, { "epoch": 8.31763619575254, "grad_norm": 0.44523105025291443, "learning_rate": 1.3515e-05, "loss": 0.031, "step": 4508 }, { "epoch": 8.319482917820867, "grad_norm": 0.49122002720832825, "learning_rate": 1.3518e-05, "loss": 0.0325, "step": 4509 }, { "epoch": 
8.321329639889196, "grad_norm": 0.5576708316802979, "learning_rate": 1.3521e-05, "loss": 0.0338, "step": 4510 }, { "epoch": 8.323176361957525, "grad_norm": 0.8101223707199097, "learning_rate": 1.3524e-05, "loss": 0.0413, "step": 4511 }, { "epoch": 8.325023084025855, "grad_norm": 0.5844168066978455, "learning_rate": 1.3527000000000001e-05, "loss": 0.0344, "step": 4512 }, { "epoch": 8.326869806094184, "grad_norm": 0.4756903350353241, "learning_rate": 1.3530000000000001e-05, "loss": 0.0314, "step": 4513 }, { "epoch": 8.328716528162511, "grad_norm": 0.48679304122924805, "learning_rate": 1.3533e-05, "loss": 0.0369, "step": 4514 }, { "epoch": 8.33056325023084, "grad_norm": 1.2075324058532715, "learning_rate": 1.3536e-05, "loss": 0.0439, "step": 4515 }, { "epoch": 8.33240997229917, "grad_norm": 1.6481680870056152, "learning_rate": 1.3539e-05, "loss": 0.0377, "step": 4516 }, { "epoch": 8.334256694367498, "grad_norm": 0.4778039753437042, "learning_rate": 1.3542000000000002e-05, "loss": 0.0359, "step": 4517 }, { "epoch": 8.336103416435826, "grad_norm": 0.3544483482837677, "learning_rate": 1.3545e-05, "loss": 0.0168, "step": 4518 }, { "epoch": 8.337950138504155, "grad_norm": 0.9810410141944885, "learning_rate": 1.3548e-05, "loss": 0.0516, "step": 4519 }, { "epoch": 8.339796860572484, "grad_norm": 0.5142959356307983, "learning_rate": 1.3551e-05, "loss": 0.0326, "step": 4520 }, { "epoch": 8.341643582640813, "grad_norm": 0.7074347734451294, "learning_rate": 1.3554e-05, "loss": 0.0432, "step": 4521 }, { "epoch": 8.34349030470914, "grad_norm": 0.4334162473678589, "learning_rate": 1.3557e-05, "loss": 0.0162, "step": 4522 }, { "epoch": 8.34533702677747, "grad_norm": 0.6149067878723145, "learning_rate": 1.356e-05, "loss": 0.0588, "step": 4523 }, { "epoch": 8.347183748845799, "grad_norm": 0.4578056037425995, "learning_rate": 1.3563e-05, "loss": 0.0198, "step": 4524 }, { "epoch": 8.349030470914128, "grad_norm": 0.6646698117256165, "learning_rate": 1.3566e-05, "loss": 0.035, "step": 
4525 }, { "epoch": 8.350877192982455, "grad_norm": 0.9544262886047363, "learning_rate": 1.3569e-05, "loss": 0.0309, "step": 4526 }, { "epoch": 8.352723915050785, "grad_norm": 0.9538107514381409, "learning_rate": 1.3572000000000002e-05, "loss": 0.0265, "step": 4527 }, { "epoch": 8.354570637119114, "grad_norm": 0.7917845845222473, "learning_rate": 1.3575000000000001e-05, "loss": 0.0433, "step": 4528 }, { "epoch": 8.356417359187443, "grad_norm": 0.6048826575279236, "learning_rate": 1.3578000000000001e-05, "loss": 0.0415, "step": 4529 }, { "epoch": 8.35826408125577, "grad_norm": 0.813008725643158, "learning_rate": 1.3581000000000001e-05, "loss": 0.0437, "step": 4530 }, { "epoch": 8.3601108033241, "grad_norm": 0.7485079169273376, "learning_rate": 1.3583999999999999e-05, "loss": 0.0411, "step": 4531 }, { "epoch": 8.361957525392429, "grad_norm": 0.6766875982284546, "learning_rate": 1.3587e-05, "loss": 0.0285, "step": 4532 }, { "epoch": 8.363804247460758, "grad_norm": 0.8954643607139587, "learning_rate": 1.359e-05, "loss": 0.0436, "step": 4533 }, { "epoch": 8.365650969529085, "grad_norm": 0.9389119148254395, "learning_rate": 1.3593e-05, "loss": 0.033, "step": 4534 }, { "epoch": 8.367497691597414, "grad_norm": 1.0599180459976196, "learning_rate": 1.3596e-05, "loss": 0.0418, "step": 4535 }, { "epoch": 8.369344413665743, "grad_norm": 1.466378092765808, "learning_rate": 1.3599e-05, "loss": 0.0693, "step": 4536 }, { "epoch": 8.371191135734072, "grad_norm": 1.0018821954727173, "learning_rate": 1.3602000000000001e-05, "loss": 0.2913, "step": 4537 }, { "epoch": 8.373037857802402, "grad_norm": 1.0712130069732666, "learning_rate": 1.3605000000000001e-05, "loss": 0.275, "step": 4538 }, { "epoch": 8.374884579870729, "grad_norm": 0.7662477493286133, "learning_rate": 1.3608e-05, "loss": 0.266, "step": 4539 }, { "epoch": 8.376731301939058, "grad_norm": 0.9335657358169556, "learning_rate": 1.3611e-05, "loss": 0.232, "step": 4540 }, { "epoch": 8.378578024007387, "grad_norm": 
0.8165726661682129, "learning_rate": 1.3614e-05, "loss": 0.186, "step": 4541 }, { "epoch": 8.380424746075716, "grad_norm": 0.6568682193756104, "learning_rate": 1.3617000000000002e-05, "loss": 0.1721, "step": 4542 }, { "epoch": 8.382271468144044, "grad_norm": 0.779513955116272, "learning_rate": 1.362e-05, "loss": 0.1871, "step": 4543 }, { "epoch": 8.384118190212373, "grad_norm": 0.8209591507911682, "learning_rate": 1.3623e-05, "loss": 0.1078, "step": 4544 }, { "epoch": 8.385964912280702, "grad_norm": 0.8797643184661865, "learning_rate": 1.3626e-05, "loss": 0.1393, "step": 4545 }, { "epoch": 8.387811634349031, "grad_norm": 0.8661841154098511, "learning_rate": 1.3629e-05, "loss": 0.1451, "step": 4546 }, { "epoch": 8.389658356417359, "grad_norm": 0.5946820974349976, "learning_rate": 1.3632000000000001e-05, "loss": 0.081, "step": 4547 }, { "epoch": 8.391505078485688, "grad_norm": 1.008434534072876, "learning_rate": 1.3635e-05, "loss": 0.1038, "step": 4548 }, { "epoch": 8.393351800554017, "grad_norm": 0.6491675972938538, "learning_rate": 1.3638e-05, "loss": 0.075, "step": 4549 }, { "epoch": 8.395198522622346, "grad_norm": 0.585092306137085, "learning_rate": 1.3641e-05, "loss": 0.0561, "step": 4550 }, { "epoch": 8.397045244690673, "grad_norm": 0.5089346170425415, "learning_rate": 1.3644e-05, "loss": 0.0644, "step": 4551 }, { "epoch": 8.398891966759003, "grad_norm": 0.616301953792572, "learning_rate": 1.3647000000000002e-05, "loss": 0.0416, "step": 4552 }, { "epoch": 8.400738688827332, "grad_norm": 0.7324120998382568, "learning_rate": 1.3650000000000001e-05, "loss": 0.0344, "step": 4553 }, { "epoch": 8.40258541089566, "grad_norm": 0.462205171585083, "learning_rate": 1.3653000000000001e-05, "loss": 0.0295, "step": 4554 }, { "epoch": 8.404432132963988, "grad_norm": 1.396769404411316, "learning_rate": 1.3656e-05, "loss": 0.0589, "step": 4555 }, { "epoch": 8.406278855032317, "grad_norm": 0.5556825399398804, "learning_rate": 1.3659e-05, "loss": 0.0327, "step": 4556 }, { 
"epoch": 8.408125577100646, "grad_norm": 0.4635709226131439, "learning_rate": 1.3662e-05, "loss": 0.0248, "step": 4557 }, { "epoch": 8.409972299168976, "grad_norm": 0.39975813031196594, "learning_rate": 1.3665e-05, "loss": 0.0459, "step": 4558 }, { "epoch": 8.411819021237303, "grad_norm": 0.8045360445976257, "learning_rate": 1.3668e-05, "loss": 0.0371, "step": 4559 }, { "epoch": 8.413665743305632, "grad_norm": 0.6244034767150879, "learning_rate": 1.3671e-05, "loss": 0.0435, "step": 4560 }, { "epoch": 8.415512465373961, "grad_norm": 0.7523409724235535, "learning_rate": 1.3674e-05, "loss": 0.0568, "step": 4561 }, { "epoch": 8.41735918744229, "grad_norm": 1.0529162883758545, "learning_rate": 1.3677000000000001e-05, "loss": 0.0473, "step": 4562 }, { "epoch": 8.41920590951062, "grad_norm": 0.44346120953559875, "learning_rate": 1.3680000000000001e-05, "loss": 0.0243, "step": 4563 }, { "epoch": 8.421052631578947, "grad_norm": 0.27785372734069824, "learning_rate": 1.3683000000000001e-05, "loss": 0.0116, "step": 4564 }, { "epoch": 8.422899353647276, "grad_norm": 0.48640650510787964, "learning_rate": 1.3686e-05, "loss": 0.0312, "step": 4565 }, { "epoch": 8.424746075715605, "grad_norm": 0.8661382794380188, "learning_rate": 1.3689e-05, "loss": 0.096, "step": 4566 }, { "epoch": 8.426592797783934, "grad_norm": 0.6696983575820923, "learning_rate": 1.3691999999999999e-05, "loss": 0.0402, "step": 4567 }, { "epoch": 8.428439519852262, "grad_norm": 0.47121769189834595, "learning_rate": 1.3695e-05, "loss": 0.0309, "step": 4568 }, { "epoch": 8.43028624192059, "grad_norm": 0.5439361929893494, "learning_rate": 1.3698e-05, "loss": 0.0376, "step": 4569 }, { "epoch": 8.43213296398892, "grad_norm": 1.22900390625, "learning_rate": 1.3701e-05, "loss": 0.0461, "step": 4570 }, { "epoch": 8.43397968605725, "grad_norm": 0.6019194722175598, "learning_rate": 1.3704e-05, "loss": 0.0366, "step": 4571 }, { "epoch": 8.435826408125576, "grad_norm": 0.981429398059845, "learning_rate": 1.3707e-05, "loss": 
0.0447, "step": 4572 }, { "epoch": 8.437673130193906, "grad_norm": 0.621160089969635, "learning_rate": 1.3710000000000001e-05, "loss": 0.0466, "step": 4573 }, { "epoch": 8.439519852262235, "grad_norm": 0.46072280406951904, "learning_rate": 1.3713e-05, "loss": 0.0297, "step": 4574 }, { "epoch": 8.441366574330564, "grad_norm": 0.8340686559677124, "learning_rate": 1.3716e-05, "loss": 0.0386, "step": 4575 }, { "epoch": 8.443213296398891, "grad_norm": 0.5266955494880676, "learning_rate": 1.3719e-05, "loss": 0.0314, "step": 4576 }, { "epoch": 8.44506001846722, "grad_norm": 0.485097199678421, "learning_rate": 1.3722e-05, "loss": 0.0282, "step": 4577 }, { "epoch": 8.44690674053555, "grad_norm": 0.617648184299469, "learning_rate": 1.3725000000000002e-05, "loss": 0.0341, "step": 4578 }, { "epoch": 8.448753462603879, "grad_norm": 0.7114557027816772, "learning_rate": 1.3728000000000001e-05, "loss": 0.037, "step": 4579 }, { "epoch": 8.450600184672206, "grad_norm": 0.8123380541801453, "learning_rate": 1.3731e-05, "loss": 0.0243, "step": 4580 }, { "epoch": 8.452446906740535, "grad_norm": 0.40049365162849426, "learning_rate": 1.3734e-05, "loss": 0.0187, "step": 4581 }, { "epoch": 8.454293628808864, "grad_norm": 0.8814468383789062, "learning_rate": 1.3736999999999999e-05, "loss": 0.0387, "step": 4582 }, { "epoch": 8.456140350877194, "grad_norm": 0.885870099067688, "learning_rate": 1.374e-05, "loss": 0.042, "step": 4583 }, { "epoch": 8.45798707294552, "grad_norm": 0.9987731575965881, "learning_rate": 1.3743e-05, "loss": 0.0392, "step": 4584 }, { "epoch": 8.45983379501385, "grad_norm": 0.6637702584266663, "learning_rate": 1.3746e-05, "loss": 0.037, "step": 4585 }, { "epoch": 8.46168051708218, "grad_norm": 0.9246504306793213, "learning_rate": 1.3749e-05, "loss": 0.0537, "step": 4586 }, { "epoch": 8.463527239150508, "grad_norm": 1.5685744285583496, "learning_rate": 1.3752e-05, "loss": 0.274, "step": 4587 }, { "epoch": 8.465373961218837, "grad_norm": 0.9505859017372131, "learning_rate": 
1.3755000000000001e-05, "loss": 0.2991, "step": 4588 }, { "epoch": 8.467220683287165, "grad_norm": 0.8866581320762634, "learning_rate": 1.3758000000000001e-05, "loss": 0.2452, "step": 4589 }, { "epoch": 8.469067405355494, "grad_norm": 0.9090424180030823, "learning_rate": 1.3761000000000001e-05, "loss": 0.2359, "step": 4590 }, { "epoch": 8.470914127423823, "grad_norm": 0.9789994955062866, "learning_rate": 1.3764e-05, "loss": 0.2069, "step": 4591 }, { "epoch": 8.472760849492152, "grad_norm": 0.6089072227478027, "learning_rate": 1.3766999999999999e-05, "loss": 0.1521, "step": 4592 }, { "epoch": 8.47460757156048, "grad_norm": 0.5650318264961243, "learning_rate": 1.377e-05, "loss": 0.1246, "step": 4593 }, { "epoch": 8.476454293628809, "grad_norm": 0.7467601895332336, "learning_rate": 1.3773e-05, "loss": 0.157, "step": 4594 }, { "epoch": 8.478301015697138, "grad_norm": 1.8868144750595093, "learning_rate": 1.3776e-05, "loss": 0.1856, "step": 4595 }, { "epoch": 8.480147737765467, "grad_norm": 1.5848222970962524, "learning_rate": 1.3779e-05, "loss": 0.276, "step": 4596 }, { "epoch": 8.481994459833794, "grad_norm": 1.11818528175354, "learning_rate": 1.3782e-05, "loss": 0.0852, "step": 4597 }, { "epoch": 8.483841181902124, "grad_norm": 0.7371710538864136, "learning_rate": 1.3785000000000001e-05, "loss": 0.1073, "step": 4598 }, { "epoch": 8.485687903970453, "grad_norm": 0.9325425624847412, "learning_rate": 1.3788e-05, "loss": 0.1418, "step": 4599 }, { "epoch": 8.487534626038782, "grad_norm": 0.6657596826553345, "learning_rate": 1.3791e-05, "loss": 0.0736, "step": 4600 }, { "epoch": 8.48938134810711, "grad_norm": 0.5805692672729492, "learning_rate": 1.3794e-05, "loss": 0.0964, "step": 4601 }, { "epoch": 8.491228070175438, "grad_norm": 0.7410169243812561, "learning_rate": 1.3797e-05, "loss": 0.0827, "step": 4602 }, { "epoch": 8.493074792243767, "grad_norm": 0.5074139833450317, "learning_rate": 1.3800000000000002e-05, "loss": 0.042, "step": 4603 }, { "epoch": 8.494921514312097, 
"grad_norm": 0.4896104633808136, "learning_rate": 1.3803e-05, "loss": 0.0373, "step": 4604 }, { "epoch": 8.496768236380424, "grad_norm": 0.4625118374824524, "learning_rate": 1.3806e-05, "loss": 0.0454, "step": 4605 }, { "epoch": 8.498614958448753, "grad_norm": 0.5676958560943604, "learning_rate": 1.3809e-05, "loss": 0.0379, "step": 4606 }, { "epoch": 8.500461680517082, "grad_norm": 0.3820827901363373, "learning_rate": 1.3812e-05, "loss": 0.0202, "step": 4607 }, { "epoch": 8.502308402585411, "grad_norm": 1.2073335647583008, "learning_rate": 1.3815e-05, "loss": 0.0194, "step": 4608 }, { "epoch": 8.504155124653739, "grad_norm": 0.7418472170829773, "learning_rate": 1.3818e-05, "loss": 0.0414, "step": 4609 }, { "epoch": 8.506001846722068, "grad_norm": 0.5792127251625061, "learning_rate": 1.3821e-05, "loss": 0.0418, "step": 4610 }, { "epoch": 8.507848568790397, "grad_norm": 0.5523576736450195, "learning_rate": 1.3824e-05, "loss": 0.0542, "step": 4611 }, { "epoch": 8.509695290858726, "grad_norm": 0.4889757037162781, "learning_rate": 1.3827e-05, "loss": 0.0219, "step": 4612 }, { "epoch": 8.511542012927055, "grad_norm": 1.076654314994812, "learning_rate": 1.3830000000000001e-05, "loss": 0.0342, "step": 4613 }, { "epoch": 8.513388734995383, "grad_norm": 0.5193042755126953, "learning_rate": 1.3833000000000001e-05, "loss": 0.0266, "step": 4614 }, { "epoch": 8.515235457063712, "grad_norm": 0.437115341424942, "learning_rate": 1.3836000000000001e-05, "loss": 0.0335, "step": 4615 }, { "epoch": 8.517082179132041, "grad_norm": 0.41590920090675354, "learning_rate": 1.3839e-05, "loss": 0.0266, "step": 4616 }, { "epoch": 8.51892890120037, "grad_norm": 0.4554344117641449, "learning_rate": 1.3841999999999999e-05, "loss": 0.0204, "step": 4617 }, { "epoch": 8.520775623268698, "grad_norm": 0.5499839782714844, "learning_rate": 1.3845e-05, "loss": 0.0224, "step": 4618 }, { "epoch": 8.522622345337027, "grad_norm": 0.9251109957695007, "learning_rate": 1.3848e-05, "loss": 0.0334, "step": 4619 }, 
{ "epoch": 8.524469067405356, "grad_norm": 0.5408797264099121, "learning_rate": 1.3851e-05, "loss": 0.0212, "step": 4620 }, { "epoch": 8.526315789473685, "grad_norm": 1.166687250137329, "learning_rate": 1.3854e-05, "loss": 0.0365, "step": 4621 }, { "epoch": 8.528162511542012, "grad_norm": 0.9165891408920288, "learning_rate": 1.3857e-05, "loss": 0.0382, "step": 4622 }, { "epoch": 8.530009233610341, "grad_norm": 1.4344496726989746, "learning_rate": 1.3860000000000001e-05, "loss": 0.0513, "step": 4623 }, { "epoch": 8.53185595567867, "grad_norm": 0.6101006865501404, "learning_rate": 1.3863000000000001e-05, "loss": 0.0363, "step": 4624 }, { "epoch": 8.533702677747, "grad_norm": 1.1394438743591309, "learning_rate": 1.3866e-05, "loss": 0.0316, "step": 4625 }, { "epoch": 8.535549399815327, "grad_norm": 0.918218195438385, "learning_rate": 1.3869e-05, "loss": 0.0425, "step": 4626 }, { "epoch": 8.537396121883656, "grad_norm": 0.9316399693489075, "learning_rate": 1.3872e-05, "loss": 0.0463, "step": 4627 }, { "epoch": 8.539242843951985, "grad_norm": 0.7699198722839355, "learning_rate": 1.3875000000000002e-05, "loss": 0.0452, "step": 4628 }, { "epoch": 8.541089566020315, "grad_norm": 0.7271835803985596, "learning_rate": 1.3878e-05, "loss": 0.0234, "step": 4629 }, { "epoch": 8.542936288088642, "grad_norm": 0.638312041759491, "learning_rate": 1.3881e-05, "loss": 0.0331, "step": 4630 }, { "epoch": 8.544783010156971, "grad_norm": 0.8469995856285095, "learning_rate": 1.3884e-05, "loss": 0.0491, "step": 4631 }, { "epoch": 8.5466297322253, "grad_norm": 0.7678780555725098, "learning_rate": 1.3887e-05, "loss": 0.0373, "step": 4632 }, { "epoch": 8.54847645429363, "grad_norm": 0.8009999990463257, "learning_rate": 1.389e-05, "loss": 0.0336, "step": 4633 }, { "epoch": 8.550323176361957, "grad_norm": 0.6101869940757751, "learning_rate": 1.3893e-05, "loss": 0.0557, "step": 4634 }, { "epoch": 8.552169898430286, "grad_norm": 0.7427794933319092, "learning_rate": 1.3896e-05, "loss": 0.0477, 
"step": 4635 }, { "epoch": 8.554016620498615, "grad_norm": 0.9742395281791687, "learning_rate": 1.3899e-05, "loss": 0.0373, "step": 4636 }, { "epoch": 8.555863342566944, "grad_norm": 1.3674216270446777, "learning_rate": 1.3902e-05, "loss": 0.2994, "step": 4637 }, { "epoch": 8.557710064635273, "grad_norm": 1.1344772577285767, "learning_rate": 1.3905000000000002e-05, "loss": 0.2497, "step": 4638 }, { "epoch": 8.5595567867036, "grad_norm": 1.1575214862823486, "learning_rate": 1.3908000000000001e-05, "loss": 0.2529, "step": 4639 }, { "epoch": 8.56140350877193, "grad_norm": 0.8929944038391113, "learning_rate": 1.3911000000000001e-05, "loss": 0.2333, "step": 4640 }, { "epoch": 8.563250230840259, "grad_norm": 0.8854674100875854, "learning_rate": 1.3914e-05, "loss": 0.1999, "step": 4641 }, { "epoch": 8.565096952908588, "grad_norm": 0.9652591347694397, "learning_rate": 1.3916999999999999e-05, "loss": 0.193, "step": 4642 }, { "epoch": 8.566943674976915, "grad_norm": 0.685058057308197, "learning_rate": 1.392e-05, "loss": 0.1591, "step": 4643 }, { "epoch": 8.568790397045245, "grad_norm": 0.6758031845092773, "learning_rate": 1.3923e-05, "loss": 0.1499, "step": 4644 }, { "epoch": 8.570637119113574, "grad_norm": 0.7304430603981018, "learning_rate": 1.3926e-05, "loss": 0.1686, "step": 4645 }, { "epoch": 8.572483841181903, "grad_norm": 0.6966080069541931, "learning_rate": 1.3929e-05, "loss": 0.1095, "step": 4646 }, { "epoch": 8.57433056325023, "grad_norm": 0.9024938344955444, "learning_rate": 1.3932e-05, "loss": 0.1383, "step": 4647 }, { "epoch": 8.57617728531856, "grad_norm": 0.5872875452041626, "learning_rate": 1.3935000000000001e-05, "loss": 0.1028, "step": 4648 }, { "epoch": 8.578024007386889, "grad_norm": 0.976383626461029, "learning_rate": 1.3938000000000001e-05, "loss": 0.075, "step": 4649 }, { "epoch": 8.579870729455218, "grad_norm": 0.7842995524406433, "learning_rate": 1.3941000000000001e-05, "loss": 0.1423, "step": 4650 }, { "epoch": 8.581717451523545, "grad_norm": 
0.5742107629776001, "learning_rate": 1.3944e-05, "loss": 0.0525, "step": 4651 }, { "epoch": 8.583564173591874, "grad_norm": 0.430160790681839, "learning_rate": 1.3947e-05, "loss": 0.038, "step": 4652 }, { "epoch": 8.585410895660203, "grad_norm": 0.6016567349433899, "learning_rate": 1.395e-05, "loss": 0.0432, "step": 4653 }, { "epoch": 8.587257617728532, "grad_norm": 0.45133915543556213, "learning_rate": 1.3953e-05, "loss": 0.053, "step": 4654 }, { "epoch": 8.58910433979686, "grad_norm": 0.6579575538635254, "learning_rate": 1.3956e-05, "loss": 0.0683, "step": 4655 }, { "epoch": 8.590951061865189, "grad_norm": 0.9302634000778198, "learning_rate": 1.3959e-05, "loss": 0.0924, "step": 4656 }, { "epoch": 8.592797783933518, "grad_norm": 0.6408567428588867, "learning_rate": 1.3962e-05, "loss": 0.0325, "step": 4657 }, { "epoch": 8.594644506001847, "grad_norm": 0.5441372394561768, "learning_rate": 1.3965000000000001e-05, "loss": 0.0242, "step": 4658 }, { "epoch": 8.596491228070175, "grad_norm": 0.7796469926834106, "learning_rate": 1.3968e-05, "loss": 0.055, "step": 4659 }, { "epoch": 8.598337950138504, "grad_norm": 0.46276140213012695, "learning_rate": 1.3971e-05, "loss": 0.0349, "step": 4660 }, { "epoch": 8.600184672206833, "grad_norm": 0.4005415439605713, "learning_rate": 1.3974e-05, "loss": 0.0215, "step": 4661 }, { "epoch": 8.602031394275162, "grad_norm": 0.478162944316864, "learning_rate": 1.3977e-05, "loss": 0.0313, "step": 4662 }, { "epoch": 8.603878116343491, "grad_norm": 0.6175740361213684, "learning_rate": 1.3980000000000002e-05, "loss": 0.046, "step": 4663 }, { "epoch": 8.605724838411819, "grad_norm": 0.43752771615982056, "learning_rate": 1.3983000000000001e-05, "loss": 0.0263, "step": 4664 }, { "epoch": 8.607571560480148, "grad_norm": 0.5159079432487488, "learning_rate": 1.3986000000000001e-05, "loss": 0.064, "step": 4665 }, { "epoch": 8.609418282548477, "grad_norm": 0.44390735030174255, "learning_rate": 1.3989e-05, "loss": 0.0253, "step": 4666 }, { "epoch": 
8.611265004616806, "grad_norm": 0.9245308637619019, "learning_rate": 1.3992e-05, "loss": 0.0375, "step": 4667 }, { "epoch": 8.613111726685133, "grad_norm": 0.644258975982666, "learning_rate": 1.3995e-05, "loss": 0.0519, "step": 4668 }, { "epoch": 8.614958448753463, "grad_norm": 0.6799528002738953, "learning_rate": 1.3998e-05, "loss": 0.0317, "step": 4669 }, { "epoch": 8.616805170821792, "grad_norm": 0.6856921315193176, "learning_rate": 1.4001e-05, "loss": 0.0274, "step": 4670 }, { "epoch": 8.61865189289012, "grad_norm": 0.5836756825447083, "learning_rate": 1.4004e-05, "loss": 0.0345, "step": 4671 }, { "epoch": 8.620498614958448, "grad_norm": 0.94598388671875, "learning_rate": 1.4007e-05, "loss": 0.0319, "step": 4672 }, { "epoch": 8.622345337026777, "grad_norm": 0.6048069000244141, "learning_rate": 1.4010000000000001e-05, "loss": 0.0371, "step": 4673 }, { "epoch": 8.624192059095106, "grad_norm": 0.6870465278625488, "learning_rate": 1.4013000000000001e-05, "loss": 0.0268, "step": 4674 }, { "epoch": 8.626038781163436, "grad_norm": 0.5122883319854736, "learning_rate": 1.4016000000000001e-05, "loss": 0.032, "step": 4675 }, { "epoch": 8.627885503231763, "grad_norm": 0.653048038482666, "learning_rate": 1.4019e-05, "loss": 0.0393, "step": 4676 }, { "epoch": 8.629732225300092, "grad_norm": 0.5902331471443176, "learning_rate": 1.4022e-05, "loss": 0.0383, "step": 4677 }, { "epoch": 8.631578947368421, "grad_norm": 0.6037182807922363, "learning_rate": 1.4025e-05, "loss": 0.0329, "step": 4678 }, { "epoch": 8.63342566943675, "grad_norm": 0.9846788644790649, "learning_rate": 1.4028e-05, "loss": 0.0627, "step": 4679 }, { "epoch": 8.635272391505078, "grad_norm": 0.4753408432006836, "learning_rate": 1.4031e-05, "loss": 0.0253, "step": 4680 }, { "epoch": 8.637119113573407, "grad_norm": 0.5847453474998474, "learning_rate": 1.4034e-05, "loss": 0.0326, "step": 4681 }, { "epoch": 8.638965835641736, "grad_norm": 0.6286370158195496, "learning_rate": 1.4037e-05, "loss": 0.0384, "step": 4682 
}, { "epoch": 8.640812557710065, "grad_norm": 0.6938286423683167, "learning_rate": 1.4040000000000001e-05, "loss": 0.042, "step": 4683 }, { "epoch": 8.642659279778393, "grad_norm": 0.8416529297828674, "learning_rate": 1.4043000000000001e-05, "loss": 0.0444, "step": 4684 }, { "epoch": 8.644506001846722, "grad_norm": 0.6412912607192993, "learning_rate": 1.4046e-05, "loss": 0.0261, "step": 4685 }, { "epoch": 8.64635272391505, "grad_norm": 0.8336824178695679, "learning_rate": 1.4049e-05, "loss": 0.046, "step": 4686 }, { "epoch": 8.64819944598338, "grad_norm": 1.7124323844909668, "learning_rate": 1.4052e-05, "loss": 0.3239, "step": 4687 }, { "epoch": 8.65004616805171, "grad_norm": 0.948986828327179, "learning_rate": 1.4055000000000002e-05, "loss": 0.2906, "step": 4688 }, { "epoch": 8.651892890120036, "grad_norm": 1.0201292037963867, "learning_rate": 1.4058000000000002e-05, "loss": 0.2812, "step": 4689 }, { "epoch": 8.653739612188366, "grad_norm": 0.8282414674758911, "learning_rate": 1.4061e-05, "loss": 0.2266, "step": 4690 }, { "epoch": 8.655586334256695, "grad_norm": 0.8216656446456909, "learning_rate": 1.4064e-05, "loss": 0.1907, "step": 4691 }, { "epoch": 8.657433056325024, "grad_norm": 0.783562421798706, "learning_rate": 1.4067e-05, "loss": 0.1587, "step": 4692 }, { "epoch": 8.659279778393351, "grad_norm": 1.0356827974319458, "learning_rate": 1.4069999999999999e-05, "loss": 0.1776, "step": 4693 }, { "epoch": 8.66112650046168, "grad_norm": 0.6998425126075745, "learning_rate": 1.4073e-05, "loss": 0.1448, "step": 4694 }, { "epoch": 8.66297322253001, "grad_norm": 1.0429692268371582, "learning_rate": 1.4076e-05, "loss": 0.1745, "step": 4695 }, { "epoch": 8.664819944598339, "grad_norm": 1.0342075824737549, "learning_rate": 1.4079e-05, "loss": 0.1288, "step": 4696 }, { "epoch": 8.666666666666666, "grad_norm": 0.6225987076759338, "learning_rate": 1.4082e-05, "loss": 0.1283, "step": 4697 }, { "epoch": 8.668513388734995, "grad_norm": 0.8024329543113708, "learning_rate": 
1.4085e-05, "loss": 0.1496, "step": 4698 }, { "epoch": 8.670360110803324, "grad_norm": 1.1623533964157104, "learning_rate": 1.4088000000000001e-05, "loss": 0.1742, "step": 4699 }, { "epoch": 8.672206832871654, "grad_norm": 0.6703691482543945, "learning_rate": 1.4091000000000001e-05, "loss": 0.0818, "step": 4700 }, { "epoch": 8.67405355493998, "grad_norm": 0.5389675498008728, "learning_rate": 1.4094000000000001e-05, "loss": 0.0464, "step": 4701 }, { "epoch": 8.67590027700831, "grad_norm": 0.38047945499420166, "learning_rate": 1.4097e-05, "loss": 0.039, "step": 4702 }, { "epoch": 8.67774699907664, "grad_norm": 0.5646745562553406, "learning_rate": 1.4099999999999999e-05, "loss": 0.0392, "step": 4703 }, { "epoch": 8.679593721144968, "grad_norm": 0.8179369568824768, "learning_rate": 1.4103e-05, "loss": 0.0522, "step": 4704 }, { "epoch": 8.681440443213296, "grad_norm": 0.46813997626304626, "learning_rate": 1.4106e-05, "loss": 0.0409, "step": 4705 }, { "epoch": 8.683287165281625, "grad_norm": 0.8852002024650574, "learning_rate": 1.4109e-05, "loss": 0.0412, "step": 4706 }, { "epoch": 8.685133887349954, "grad_norm": 0.3803097605705261, "learning_rate": 1.4112e-05, "loss": 0.0348, "step": 4707 }, { "epoch": 8.686980609418283, "grad_norm": 0.858212947845459, "learning_rate": 1.4115e-05, "loss": 0.0358, "step": 4708 }, { "epoch": 8.68882733148661, "grad_norm": 0.5910655856132507, "learning_rate": 1.4118000000000001e-05, "loss": 0.0353, "step": 4709 }, { "epoch": 8.69067405355494, "grad_norm": 0.4760793149471283, "learning_rate": 1.4121e-05, "loss": 0.0341, "step": 4710 }, { "epoch": 8.692520775623269, "grad_norm": 0.3494793176651001, "learning_rate": 1.4124e-05, "loss": 0.0199, "step": 4711 }, { "epoch": 8.694367497691598, "grad_norm": 0.6482746601104736, "learning_rate": 1.4127e-05, "loss": 0.0303, "step": 4712 }, { "epoch": 8.696214219759927, "grad_norm": 0.547419548034668, "learning_rate": 1.413e-05, "loss": 0.0254, "step": 4713 }, { "epoch": 8.698060941828254, "grad_norm": 
0.493529736995697, "learning_rate": 1.4133000000000002e-05, "loss": 0.0318, "step": 4714 }, { "epoch": 8.699907663896584, "grad_norm": 0.4994904696941376, "learning_rate": 1.4136e-05, "loss": 0.0191, "step": 4715 }, { "epoch": 8.701754385964913, "grad_norm": 0.3659327030181885, "learning_rate": 1.4139e-05, "loss": 0.0182, "step": 4716 }, { "epoch": 8.703601108033242, "grad_norm": 0.5548020601272583, "learning_rate": 1.4142e-05, "loss": 0.0366, "step": 4717 }, { "epoch": 8.70544783010157, "grad_norm": 0.8539415597915649, "learning_rate": 1.4145e-05, "loss": 0.0229, "step": 4718 }, { "epoch": 8.707294552169898, "grad_norm": 0.7660424709320068, "learning_rate": 1.4148e-05, "loss": 0.0378, "step": 4719 }, { "epoch": 8.709141274238227, "grad_norm": 0.6833702325820923, "learning_rate": 1.4151e-05, "loss": 0.0251, "step": 4720 }, { "epoch": 8.710987996306557, "grad_norm": 0.6271822452545166, "learning_rate": 1.4154e-05, "loss": 0.0209, "step": 4721 }, { "epoch": 8.712834718374884, "grad_norm": 0.5643937587738037, "learning_rate": 1.4157e-05, "loss": 0.0169, "step": 4722 }, { "epoch": 8.714681440443213, "grad_norm": 0.5487121939659119, "learning_rate": 1.416e-05, "loss": 0.0333, "step": 4723 }, { "epoch": 8.716528162511542, "grad_norm": 0.5783451199531555, "learning_rate": 1.4163000000000001e-05, "loss": 0.0316, "step": 4724 }, { "epoch": 8.718374884579871, "grad_norm": 0.7464359402656555, "learning_rate": 1.4166000000000001e-05, "loss": 0.0305, "step": 4725 }, { "epoch": 8.720221606648199, "grad_norm": 0.9655202627182007, "learning_rate": 1.4169000000000001e-05, "loss": 0.029, "step": 4726 }, { "epoch": 8.722068328716528, "grad_norm": 0.885925829410553, "learning_rate": 1.4172e-05, "loss": 0.0289, "step": 4727 }, { "epoch": 8.723915050784857, "grad_norm": 0.8719249367713928, "learning_rate": 1.4174999999999999e-05, "loss": 0.0319, "step": 4728 }, { "epoch": 8.725761772853186, "grad_norm": 0.9360407590866089, "learning_rate": 1.4178e-05, "loss": 0.047, "step": 4729 }, { 
"epoch": 8.727608494921514, "grad_norm": 0.672272264957428, "learning_rate": 1.4181e-05, "loss": 0.051, "step": 4730 }, { "epoch": 8.729455216989843, "grad_norm": 0.5661531090736389, "learning_rate": 1.4184e-05, "loss": 0.0237, "step": 4731 }, { "epoch": 8.731301939058172, "grad_norm": 0.8892865777015686, "learning_rate": 1.4187e-05, "loss": 0.0275, "step": 4732 }, { "epoch": 8.733148661126501, "grad_norm": 0.5317668318748474, "learning_rate": 1.419e-05, "loss": 0.0258, "step": 4733 }, { "epoch": 8.734995383194828, "grad_norm": 0.7972832322120667, "learning_rate": 1.4193000000000001e-05, "loss": 0.0391, "step": 4734 }, { "epoch": 8.736842105263158, "grad_norm": 0.6717183589935303, "learning_rate": 1.4196000000000001e-05, "loss": 0.029, "step": 4735 }, { "epoch": 8.738688827331487, "grad_norm": 0.8849234580993652, "learning_rate": 1.4199e-05, "loss": 0.0526, "step": 4736 }, { "epoch": 8.740535549399816, "grad_norm": 1.2492313385009766, "learning_rate": 1.4202e-05, "loss": 0.3444, "step": 4737 }, { "epoch": 8.742382271468145, "grad_norm": 0.9331575036048889, "learning_rate": 1.4205e-05, "loss": 0.2953, "step": 4738 }, { "epoch": 8.744228993536472, "grad_norm": 0.8331683278083801, "learning_rate": 1.4208e-05, "loss": 0.2245, "step": 4739 }, { "epoch": 8.746075715604801, "grad_norm": 0.6677910089492798, "learning_rate": 1.4211e-05, "loss": 0.1566, "step": 4740 }, { "epoch": 8.74792243767313, "grad_norm": 0.9906908273696899, "learning_rate": 1.4214e-05, "loss": 0.2517, "step": 4741 }, { "epoch": 8.749769159741458, "grad_norm": 0.6925485730171204, "learning_rate": 1.4217e-05, "loss": 0.1647, "step": 4742 }, { "epoch": 8.751615881809787, "grad_norm": 0.5896223783493042, "learning_rate": 1.422e-05, "loss": 0.1449, "step": 4743 }, { "epoch": 8.753462603878116, "grad_norm": 0.5901791453361511, "learning_rate": 1.4223000000000001e-05, "loss": 0.1015, "step": 4744 }, { "epoch": 8.755309325946445, "grad_norm": 0.6510495543479919, "learning_rate": 1.4226e-05, "loss": 0.1121, 
"step": 4745 }, { "epoch": 8.757156048014775, "grad_norm": 0.635270893573761, "learning_rate": 1.4229e-05, "loss": 0.105, "step": 4746 }, { "epoch": 8.759002770083102, "grad_norm": 0.5553750395774841, "learning_rate": 1.4232e-05, "loss": 0.0773, "step": 4747 }, { "epoch": 8.760849492151431, "grad_norm": 0.9713811278343201, "learning_rate": 1.4235e-05, "loss": 0.1125, "step": 4748 }, { "epoch": 8.76269621421976, "grad_norm": 0.6695632934570312, "learning_rate": 1.4238000000000002e-05, "loss": 0.099, "step": 4749 }, { "epoch": 8.76454293628809, "grad_norm": 0.6211198568344116, "learning_rate": 1.4241000000000001e-05, "loss": 0.0644, "step": 4750 }, { "epoch": 8.766389658356417, "grad_norm": 0.8161517977714539, "learning_rate": 1.4244000000000001e-05, "loss": 0.0644, "step": 4751 }, { "epoch": 8.768236380424746, "grad_norm": 0.4428170323371887, "learning_rate": 1.4247e-05, "loss": 0.0423, "step": 4752 }, { "epoch": 8.770083102493075, "grad_norm": 0.5940005779266357, "learning_rate": 1.4249999999999999e-05, "loss": 0.0437, "step": 4753 }, { "epoch": 8.771929824561404, "grad_norm": 0.4171900749206543, "learning_rate": 1.4253e-05, "loss": 0.0327, "step": 4754 }, { "epoch": 8.773776546629731, "grad_norm": 0.3937453627586365, "learning_rate": 1.4256e-05, "loss": 0.0255, "step": 4755 }, { "epoch": 8.77562326869806, "grad_norm": 0.6645901799201965, "learning_rate": 1.4259e-05, "loss": 0.0366, "step": 4756 }, { "epoch": 8.77746999076639, "grad_norm": 1.1906750202178955, "learning_rate": 1.4262e-05, "loss": 0.0391, "step": 4757 }, { "epoch": 8.779316712834719, "grad_norm": 0.592204213142395, "learning_rate": 1.4265e-05, "loss": 0.0487, "step": 4758 }, { "epoch": 8.781163434903046, "grad_norm": 0.6466925144195557, "learning_rate": 1.4268000000000001e-05, "loss": 0.0395, "step": 4759 }, { "epoch": 8.783010156971375, "grad_norm": 0.531396210193634, "learning_rate": 1.4271000000000001e-05, "loss": 0.0301, "step": 4760 }, { "epoch": 8.784856879039705, "grad_norm": 
0.8401514887809753, "learning_rate": 1.4274000000000001e-05, "loss": 0.0415, "step": 4761 }, { "epoch": 8.786703601108034, "grad_norm": 0.6957353353500366, "learning_rate": 1.4277e-05, "loss": 0.0299, "step": 4762 }, { "epoch": 8.788550323176363, "grad_norm": 0.6141769289970398, "learning_rate": 1.428e-05, "loss": 0.0373, "step": 4763 }, { "epoch": 8.79039704524469, "grad_norm": 0.6922504901885986, "learning_rate": 1.4283e-05, "loss": 0.0712, "step": 4764 }, { "epoch": 8.79224376731302, "grad_norm": 0.7587156891822815, "learning_rate": 1.4286e-05, "loss": 0.025, "step": 4765 }, { "epoch": 8.794090489381349, "grad_norm": 0.4232429265975952, "learning_rate": 1.4289e-05, "loss": 0.0319, "step": 4766 }, { "epoch": 8.795937211449676, "grad_norm": 0.6227977871894836, "learning_rate": 1.4292e-05, "loss": 0.0451, "step": 4767 }, { "epoch": 8.797783933518005, "grad_norm": 0.5408965945243835, "learning_rate": 1.4295e-05, "loss": 0.0369, "step": 4768 }, { "epoch": 8.799630655586334, "grad_norm": 0.45448073744773865, "learning_rate": 1.4298000000000001e-05, "loss": 0.023, "step": 4769 }, { "epoch": 8.801477377654663, "grad_norm": 0.42481276392936707, "learning_rate": 1.4301e-05, "loss": 0.0174, "step": 4770 }, { "epoch": 8.803324099722992, "grad_norm": 0.4406210482120514, "learning_rate": 1.4304e-05, "loss": 0.0232, "step": 4771 }, { "epoch": 8.80517082179132, "grad_norm": 0.7529833316802979, "learning_rate": 1.4307e-05, "loss": 0.028, "step": 4772 }, { "epoch": 8.807017543859649, "grad_norm": 0.8267642855644226, "learning_rate": 1.431e-05, "loss": 0.0386, "step": 4773 }, { "epoch": 8.808864265927978, "grad_norm": 0.7792585492134094, "learning_rate": 1.4313000000000002e-05, "loss": 0.0436, "step": 4774 }, { "epoch": 8.810710987996307, "grad_norm": 0.6700918674468994, "learning_rate": 1.4316000000000002e-05, "loss": 0.0339, "step": 4775 }, { "epoch": 8.812557710064635, "grad_norm": 0.4729313552379608, "learning_rate": 1.4319e-05, "loss": 0.0412, "step": 4776 }, { "epoch": 
8.814404432132964, "grad_norm": 0.7246980667114258, "learning_rate": 1.4322e-05, "loss": 0.0492, "step": 4777 }, { "epoch": 8.816251154201293, "grad_norm": 0.5295450091362, "learning_rate": 1.4325e-05, "loss": 0.0213, "step": 4778 }, { "epoch": 8.818097876269622, "grad_norm": 0.5199748873710632, "learning_rate": 1.4328e-05, "loss": 0.0266, "step": 4779 }, { "epoch": 8.81994459833795, "grad_norm": 0.5751835703849792, "learning_rate": 1.4331e-05, "loss": 0.0293, "step": 4780 }, { "epoch": 8.821791320406279, "grad_norm": 0.6247977614402771, "learning_rate": 1.4334e-05, "loss": 0.04, "step": 4781 }, { "epoch": 8.823638042474608, "grad_norm": 0.44509533047676086, "learning_rate": 1.4337e-05, "loss": 0.019, "step": 4782 }, { "epoch": 8.825484764542937, "grad_norm": 1.0526492595672607, "learning_rate": 1.434e-05, "loss": 0.036, "step": 4783 }, { "epoch": 8.827331486611264, "grad_norm": 0.4944753050804138, "learning_rate": 1.4343000000000001e-05, "loss": 0.0313, "step": 4784 }, { "epoch": 8.829178208679593, "grad_norm": 0.8059141635894775, "learning_rate": 1.4346000000000001e-05, "loss": 0.0512, "step": 4785 }, { "epoch": 8.831024930747922, "grad_norm": 0.8032324314117432, "learning_rate": 1.4349000000000001e-05, "loss": 0.0417, "step": 4786 }, { "epoch": 8.832871652816252, "grad_norm": 1.1484620571136475, "learning_rate": 1.4352e-05, "loss": 0.2946, "step": 4787 }, { "epoch": 8.83471837488458, "grad_norm": 0.8292638063430786, "learning_rate": 1.4355e-05, "loss": 0.2373, "step": 4788 }, { "epoch": 8.836565096952908, "grad_norm": 0.7926152944564819, "learning_rate": 1.4358e-05, "loss": 0.2248, "step": 4789 }, { "epoch": 8.838411819021237, "grad_norm": 0.909888744354248, "learning_rate": 1.4361e-05, "loss": 0.1743, "step": 4790 }, { "epoch": 8.840258541089566, "grad_norm": 1.0106759071350098, "learning_rate": 1.4364e-05, "loss": 0.2385, "step": 4791 }, { "epoch": 8.842105263157894, "grad_norm": 0.668520450592041, "learning_rate": 1.4367e-05, "loss": 0.1138, "step": 4792 }, { 
"epoch": 8.843951985226223, "grad_norm": 0.7502413988113403, "learning_rate": 1.437e-05, "loss": 0.1626, "step": 4793 }, { "epoch": 8.845798707294552, "grad_norm": 0.8197979927062988, "learning_rate": 1.4373000000000001e-05, "loss": 0.1745, "step": 4794 }, { "epoch": 8.847645429362881, "grad_norm": 0.7981426119804382, "learning_rate": 1.4376000000000001e-05, "loss": 0.1643, "step": 4795 }, { "epoch": 8.84949215143121, "grad_norm": 0.7516316175460815, "learning_rate": 1.4379e-05, "loss": 0.1304, "step": 4796 }, { "epoch": 8.851338873499538, "grad_norm": 0.6946542263031006, "learning_rate": 1.4382e-05, "loss": 0.1273, "step": 4797 }, { "epoch": 8.853185595567867, "grad_norm": 0.6469597816467285, "learning_rate": 1.4385e-05, "loss": 0.1079, "step": 4798 }, { "epoch": 8.855032317636196, "grad_norm": 0.7252698540687561, "learning_rate": 1.4388000000000002e-05, "loss": 0.1114, "step": 4799 }, { "epoch": 8.856879039704525, "grad_norm": 0.569025456905365, "learning_rate": 1.4391000000000002e-05, "loss": 0.0745, "step": 4800 }, { "epoch": 8.858725761772853, "grad_norm": 0.9024773836135864, "learning_rate": 1.4394e-05, "loss": 0.078, "step": 4801 }, { "epoch": 8.860572483841182, "grad_norm": 0.5917884111404419, "learning_rate": 1.4397e-05, "loss": 0.0585, "step": 4802 }, { "epoch": 8.86241920590951, "grad_norm": 0.6219111680984497, "learning_rate": 1.44e-05, "loss": 0.0345, "step": 4803 }, { "epoch": 8.86426592797784, "grad_norm": 0.46978530287742615, "learning_rate": 1.4403e-05, "loss": 0.0642, "step": 4804 }, { "epoch": 8.866112650046167, "grad_norm": 0.7603000998497009, "learning_rate": 1.4406e-05, "loss": 0.0584, "step": 4805 }, { "epoch": 8.867959372114496, "grad_norm": 0.9757034182548523, "learning_rate": 1.4409e-05, "loss": 0.0394, "step": 4806 }, { "epoch": 8.869806094182826, "grad_norm": 0.5145926475524902, "learning_rate": 1.4412e-05, "loss": 0.0591, "step": 4807 }, { "epoch": 8.871652816251155, "grad_norm": 0.40463876724243164, "learning_rate": 1.4415e-05, "loss": 
0.0198, "step": 4808 }, { "epoch": 8.873499538319482, "grad_norm": 0.4177763760089874, "learning_rate": 1.4418000000000002e-05, "loss": 0.0331, "step": 4809 }, { "epoch": 8.875346260387811, "grad_norm": 0.4561941921710968, "learning_rate": 1.4421000000000001e-05, "loss": 0.0333, "step": 4810 }, { "epoch": 8.87719298245614, "grad_norm": 0.36311978101730347, "learning_rate": 1.4424000000000001e-05, "loss": 0.0231, "step": 4811 }, { "epoch": 8.87903970452447, "grad_norm": 0.6601563096046448, "learning_rate": 1.4427000000000001e-05, "loss": 0.0263, "step": 4812 }, { "epoch": 8.880886426592799, "grad_norm": 0.49798139929771423, "learning_rate": 1.4429999999999999e-05, "loss": 0.0271, "step": 4813 }, { "epoch": 8.882733148661126, "grad_norm": 0.5754868388175964, "learning_rate": 1.4433e-05, "loss": 0.0381, "step": 4814 }, { "epoch": 8.884579870729455, "grad_norm": 0.6019905209541321, "learning_rate": 1.4436e-05, "loss": 0.0344, "step": 4815 }, { "epoch": 8.886426592797784, "grad_norm": 0.39315265417099, "learning_rate": 1.4439e-05, "loss": 0.0229, "step": 4816 }, { "epoch": 8.888273314866112, "grad_norm": 0.4014921486377716, "learning_rate": 1.4442e-05, "loss": 0.018, "step": 4817 }, { "epoch": 8.89012003693444, "grad_norm": 0.5342328548431396, "learning_rate": 1.4445e-05, "loss": 0.0272, "step": 4818 }, { "epoch": 8.89196675900277, "grad_norm": 0.5129212141036987, "learning_rate": 1.4448e-05, "loss": 0.0189, "step": 4819 }, { "epoch": 8.8938134810711, "grad_norm": 0.4567769169807434, "learning_rate": 1.4451000000000001e-05, "loss": 0.0205, "step": 4820 }, { "epoch": 8.895660203139428, "grad_norm": 0.5290626287460327, "learning_rate": 1.4454000000000001e-05, "loss": 0.0312, "step": 4821 }, { "epoch": 8.897506925207756, "grad_norm": 0.5846696496009827, "learning_rate": 1.4457e-05, "loss": 0.0266, "step": 4822 }, { "epoch": 8.899353647276085, "grad_norm": 0.9421910643577576, "learning_rate": 1.446e-05, "loss": 0.0623, "step": 4823 }, { "epoch": 8.901200369344414, 
"grad_norm": 0.532992422580719, "learning_rate": 1.4463e-05, "loss": 0.0336, "step": 4824 }, { "epoch": 8.903047091412743, "grad_norm": 0.4452487826347351, "learning_rate": 1.4466e-05, "loss": 0.0262, "step": 4825 }, { "epoch": 8.90489381348107, "grad_norm": 0.6956632733345032, "learning_rate": 1.4469e-05, "loss": 0.0392, "step": 4826 }, { "epoch": 8.9067405355494, "grad_norm": 0.6956707835197449, "learning_rate": 1.4472e-05, "loss": 0.0369, "step": 4827 }, { "epoch": 8.908587257617729, "grad_norm": 0.5482403635978699, "learning_rate": 1.4475e-05, "loss": 0.0269, "step": 4828 }, { "epoch": 8.910433979686058, "grad_norm": 0.5756843686103821, "learning_rate": 1.4478e-05, "loss": 0.0346, "step": 4829 }, { "epoch": 8.912280701754385, "grad_norm": 0.3994274437427521, "learning_rate": 1.4481e-05, "loss": 0.0266, "step": 4830 }, { "epoch": 8.914127423822714, "grad_norm": 0.5239002108573914, "learning_rate": 1.4484e-05, "loss": 0.0301, "step": 4831 }, { "epoch": 8.915974145891044, "grad_norm": 0.5991451740264893, "learning_rate": 1.4487e-05, "loss": 0.0362, "step": 4832 }, { "epoch": 8.917820867959373, "grad_norm": 0.5694364905357361, "learning_rate": 1.449e-05, "loss": 0.0286, "step": 4833 }, { "epoch": 8.9196675900277, "grad_norm": 0.8278652429580688, "learning_rate": 1.4493e-05, "loss": 0.0487, "step": 4834 }, { "epoch": 8.92151431209603, "grad_norm": 0.6823614835739136, "learning_rate": 1.4496000000000001e-05, "loss": 0.0332, "step": 4835 }, { "epoch": 8.923361034164358, "grad_norm": 1.0626842975616455, "learning_rate": 1.4499000000000001e-05, "loss": 0.0489, "step": 4836 }, { "epoch": 8.925207756232687, "grad_norm": 1.330648422241211, "learning_rate": 1.4502000000000001e-05, "loss": 0.295, "step": 4837 }, { "epoch": 8.927054478301017, "grad_norm": 0.8124020099639893, "learning_rate": 1.4505e-05, "loss": 0.2178, "step": 4838 }, { "epoch": 8.928901200369344, "grad_norm": 1.0896893739700317, "learning_rate": 1.4507999999999999e-05, "loss": 0.2638, "step": 4839 }, { 
"epoch": 8.930747922437673, "grad_norm": 1.3501861095428467, "learning_rate": 1.4511e-05, "loss": 0.2023, "step": 4840 }, { "epoch": 8.932594644506002, "grad_norm": 0.8362381458282471, "learning_rate": 1.4514e-05, "loss": 0.142, "step": 4841 }, { "epoch": 8.93444136657433, "grad_norm": 0.6680560111999512, "learning_rate": 1.4517e-05, "loss": 0.1191, "step": 4842 }, { "epoch": 8.936288088642659, "grad_norm": 0.6559258103370667, "learning_rate": 1.452e-05, "loss": 0.1294, "step": 4843 }, { "epoch": 8.938134810710988, "grad_norm": 1.0417401790618896, "learning_rate": 1.4523e-05, "loss": 0.143, "step": 4844 }, { "epoch": 8.939981532779317, "grad_norm": 1.0837085247039795, "learning_rate": 1.4526000000000001e-05, "loss": 0.1465, "step": 4845 }, { "epoch": 8.941828254847646, "grad_norm": 0.5968208312988281, "learning_rate": 1.4529000000000001e-05, "loss": 0.0799, "step": 4846 }, { "epoch": 8.943674976915974, "grad_norm": 0.495822012424469, "learning_rate": 1.4532e-05, "loss": 0.0643, "step": 4847 }, { "epoch": 8.945521698984303, "grad_norm": 0.7860730886459351, "learning_rate": 1.4535e-05, "loss": 0.0953, "step": 4848 }, { "epoch": 8.947368421052632, "grad_norm": 0.5441517233848572, "learning_rate": 1.4538e-05, "loss": 0.0703, "step": 4849 }, { "epoch": 8.949215143120961, "grad_norm": 0.5062694549560547, "learning_rate": 1.4541e-05, "loss": 0.039, "step": 4850 }, { "epoch": 8.951061865189288, "grad_norm": 0.6828703880310059, "learning_rate": 1.4544e-05, "loss": 0.0599, "step": 4851 }, { "epoch": 8.952908587257618, "grad_norm": 0.5830159187316895, "learning_rate": 1.4547e-05, "loss": 0.0811, "step": 4852 }, { "epoch": 8.954755309325947, "grad_norm": 0.44063887000083923, "learning_rate": 1.455e-05, "loss": 0.0283, "step": 4853 }, { "epoch": 8.956602031394276, "grad_norm": 0.5686138868331909, "learning_rate": 1.4553e-05, "loss": 0.0335, "step": 4854 }, { "epoch": 8.958448753462603, "grad_norm": 0.6429723501205444, "learning_rate": 1.4556000000000001e-05, "loss": 0.0349, 
"step": 4855 }, { "epoch": 8.960295475530932, "grad_norm": 0.43935972452163696, "learning_rate": 1.4559e-05, "loss": 0.0238, "step": 4856 }, { "epoch": 8.962142197599261, "grad_norm": 0.5402309894561768, "learning_rate": 1.4562e-05, "loss": 0.029, "step": 4857 }, { "epoch": 8.96398891966759, "grad_norm": 0.7583809494972229, "learning_rate": 1.4565e-05, "loss": 0.0632, "step": 4858 }, { "epoch": 8.965835641735918, "grad_norm": 0.5806187391281128, "learning_rate": 1.4568e-05, "loss": 0.0397, "step": 4859 }, { "epoch": 8.967682363804247, "grad_norm": 0.4688240587711334, "learning_rate": 1.4571000000000002e-05, "loss": 0.0358, "step": 4860 }, { "epoch": 8.969529085872576, "grad_norm": 0.6508371829986572, "learning_rate": 1.4574000000000001e-05, "loss": 0.0313, "step": 4861 }, { "epoch": 8.971375807940905, "grad_norm": 0.5394099950790405, "learning_rate": 1.4577e-05, "loss": 0.0347, "step": 4862 }, { "epoch": 8.973222530009235, "grad_norm": 0.2626785635948181, "learning_rate": 1.458e-05, "loss": 0.014, "step": 4863 }, { "epoch": 8.975069252077562, "grad_norm": 0.7136227488517761, "learning_rate": 1.4582999999999999e-05, "loss": 0.0326, "step": 4864 }, { "epoch": 8.976915974145891, "grad_norm": 0.5395230054855347, "learning_rate": 1.4586e-05, "loss": 0.024, "step": 4865 }, { "epoch": 8.97876269621422, "grad_norm": 0.5298368334770203, "learning_rate": 1.4589e-05, "loss": 0.038, "step": 4866 }, { "epoch": 8.980609418282548, "grad_norm": 0.6000943779945374, "learning_rate": 1.4592e-05, "loss": 0.0362, "step": 4867 }, { "epoch": 8.982456140350877, "grad_norm": 0.5490943193435669, "learning_rate": 1.4595e-05, "loss": 0.0466, "step": 4868 }, { "epoch": 8.984302862419206, "grad_norm": 0.5911248326301575, "learning_rate": 1.4598e-05, "loss": 0.035, "step": 4869 }, { "epoch": 8.986149584487535, "grad_norm": 0.5520859956741333, "learning_rate": 1.4601000000000001e-05, "loss": 0.0173, "step": 4870 }, { "epoch": 8.987996306555864, "grad_norm": 0.45630595088005066, "learning_rate": 
1.4604000000000001e-05, "loss": 0.0276, "step": 4871 }, { "epoch": 8.989843028624191, "grad_norm": 0.4523727297782898, "learning_rate": 1.4607000000000001e-05, "loss": 0.0223, "step": 4872 }, { "epoch": 8.99168975069252, "grad_norm": 0.5295506119728088, "learning_rate": 1.461e-05, "loss": 0.0301, "step": 4873 }, { "epoch": 8.99353647276085, "grad_norm": 0.6663303375244141, "learning_rate": 1.4613e-05, "loss": 0.0329, "step": 4874 }, { "epoch": 8.995383194829179, "grad_norm": 0.7034157514572144, "learning_rate": 1.4616e-05, "loss": 0.0316, "step": 4875 }, { "epoch": 8.997229916897506, "grad_norm": 0.5341060161590576, "learning_rate": 1.4619e-05, "loss": 0.0348, "step": 4876 }, { "epoch": 8.999076638965835, "grad_norm": 0.8159637451171875, "learning_rate": 1.4622e-05, "loss": 0.0454, "step": 4877 }, { "epoch": 9.0, "grad_norm": 0.6270403265953064, "learning_rate": 1.4625e-05, "loss": 0.0281, "step": 4878 }, { "epoch": 9.00184672206833, "grad_norm": 0.7949193120002747, "learning_rate": 1.4628e-05, "loss": 0.2474, "step": 4879 }, { "epoch": 9.003693444136658, "grad_norm": 0.7404318451881409, "learning_rate": 1.4631000000000001e-05, "loss": 0.2327, "step": 4880 }, { "epoch": 9.005540166204986, "grad_norm": 0.704534649848938, "learning_rate": 1.4634e-05, "loss": 0.2012, "step": 4881 }, { "epoch": 9.007386888273315, "grad_norm": 0.7474093437194824, "learning_rate": 1.4637e-05, "loss": 0.187, "step": 4882 }, { "epoch": 9.009233610341644, "grad_norm": 0.8021607995033264, "learning_rate": 1.464e-05, "loss": 0.2167, "step": 4883 }, { "epoch": 9.011080332409973, "grad_norm": 0.6698554754257202, "learning_rate": 1.4643e-05, "loss": 0.184, "step": 4884 }, { "epoch": 9.0129270544783, "grad_norm": 0.5793368220329285, "learning_rate": 1.4646000000000002e-05, "loss": 0.0935, "step": 4885 }, { "epoch": 9.01477377654663, "grad_norm": 0.963193416595459, "learning_rate": 1.4649000000000002e-05, "loss": 0.1459, "step": 4886 }, { "epoch": 9.016620498614959, "grad_norm": 
0.5509797930717468, "learning_rate": 1.4652e-05, "loss": 0.0893, "step": 4887 }, { "epoch": 9.018467220683288, "grad_norm": 0.5855543613433838, "learning_rate": 1.4655e-05, "loss": 0.1437, "step": 4888 }, { "epoch": 9.020313942751615, "grad_norm": 0.5491319894790649, "learning_rate": 1.4658e-05, "loss": 0.1161, "step": 4889 }, { "epoch": 9.022160664819944, "grad_norm": 0.6451462507247925, "learning_rate": 1.4661e-05, "loss": 0.0724, "step": 4890 }, { "epoch": 9.024007386888274, "grad_norm": 0.5323352217674255, "learning_rate": 1.4664e-05, "loss": 0.0917, "step": 4891 }, { "epoch": 9.025854108956603, "grad_norm": 0.5460320115089417, "learning_rate": 1.4667e-05, "loss": 0.0553, "step": 4892 }, { "epoch": 9.02770083102493, "grad_norm": 0.6950182914733887, "learning_rate": 1.467e-05, "loss": 0.1156, "step": 4893 }, { "epoch": 9.02954755309326, "grad_norm": 0.9646598696708679, "learning_rate": 1.4673e-05, "loss": 0.0439, "step": 4894 }, { "epoch": 9.031394275161588, "grad_norm": 0.4271436929702759, "learning_rate": 1.4676000000000001e-05, "loss": 0.0377, "step": 4895 }, { "epoch": 9.033240997229917, "grad_norm": 0.6988269686698914, "learning_rate": 1.4679000000000001e-05, "loss": 0.0372, "step": 4896 }, { "epoch": 9.035087719298245, "grad_norm": 0.4358237683773041, "learning_rate": 1.4682000000000001e-05, "loss": 0.0305, "step": 4897 }, { "epoch": 9.036934441366574, "grad_norm": 0.47687071561813354, "learning_rate": 1.4685000000000001e-05, "loss": 0.0283, "step": 4898 }, { "epoch": 9.038781163434903, "grad_norm": 0.589789628982544, "learning_rate": 1.4687999999999999e-05, "loss": 0.0187, "step": 4899 }, { "epoch": 9.040627885503232, "grad_norm": 0.7305690050125122, "learning_rate": 1.4691e-05, "loss": 0.05, "step": 4900 }, { "epoch": 9.04247460757156, "grad_norm": 0.3432635962963104, "learning_rate": 1.4694e-05, "loss": 0.0259, "step": 4901 }, { "epoch": 9.044321329639889, "grad_norm": 0.3739214837551117, "learning_rate": 1.4697e-05, "loss": 0.03, "step": 4902 }, { 
"epoch": 9.046168051708218, "grad_norm": 0.5927174091339111, "learning_rate": 1.47e-05, "loss": 0.037, "step": 4903 }, { "epoch": 9.048014773776547, "grad_norm": 0.7059500813484192, "learning_rate": 1.4703e-05, "loss": 0.0446, "step": 4904 }, { "epoch": 9.049861495844876, "grad_norm": 0.37194669246673584, "learning_rate": 1.4706000000000001e-05, "loss": 0.0214, "step": 4905 }, { "epoch": 9.051708217913204, "grad_norm": 1.4105937480926514, "learning_rate": 1.4709000000000001e-05, "loss": 0.056, "step": 4906 }, { "epoch": 9.053554939981533, "grad_norm": 0.6077854037284851, "learning_rate": 1.4712e-05, "loss": 0.029, "step": 4907 }, { "epoch": 9.055401662049862, "grad_norm": 0.4426223337650299, "learning_rate": 1.4715e-05, "loss": 0.0204, "step": 4908 }, { "epoch": 9.057248384118191, "grad_norm": 1.0114620923995972, "learning_rate": 1.4718e-05, "loss": 0.0545, "step": 4909 }, { "epoch": 9.059095106186518, "grad_norm": 0.5293107032775879, "learning_rate": 1.4721000000000002e-05, "loss": 0.0183, "step": 4910 }, { "epoch": 9.060941828254848, "grad_norm": 0.7033582329750061, "learning_rate": 1.4724e-05, "loss": 0.0342, "step": 4911 }, { "epoch": 9.062788550323177, "grad_norm": 0.4826132655143738, "learning_rate": 1.4727e-05, "loss": 0.0257, "step": 4912 }, { "epoch": 9.064635272391506, "grad_norm": 0.5029767155647278, "learning_rate": 1.473e-05, "loss": 0.0249, "step": 4913 }, { "epoch": 9.066481994459833, "grad_norm": 0.35271936655044556, "learning_rate": 1.4733e-05, "loss": 0.0197, "step": 4914 }, { "epoch": 9.068328716528162, "grad_norm": 0.6118987202644348, "learning_rate": 1.4736000000000001e-05, "loss": 0.017, "step": 4915 }, { "epoch": 9.070175438596491, "grad_norm": 0.4268760681152344, "learning_rate": 1.4739e-05, "loss": 0.0292, "step": 4916 }, { "epoch": 9.07202216066482, "grad_norm": 0.6673272252082825, "learning_rate": 1.4742e-05, "loss": 0.037, "step": 4917 }, { "epoch": 9.073868882733148, "grad_norm": 0.5449793934822083, "learning_rate": 1.4745e-05, "loss": 
0.0386, "step": 4918 }, { "epoch": 9.075715604801477, "grad_norm": 0.45347753167152405, "learning_rate": 1.4748e-05, "loss": 0.0207, "step": 4919 }, { "epoch": 9.077562326869806, "grad_norm": 0.36634916067123413, "learning_rate": 1.4751000000000002e-05, "loss": 0.0185, "step": 4920 }, { "epoch": 9.079409048938135, "grad_norm": 0.9379186034202576, "learning_rate": 1.4754000000000001e-05, "loss": 0.0508, "step": 4921 }, { "epoch": 9.081255771006463, "grad_norm": 0.5121855139732361, "learning_rate": 1.4757000000000001e-05, "loss": 0.0271, "step": 4922 }, { "epoch": 9.083102493074792, "grad_norm": 1.2979964017868042, "learning_rate": 1.4760000000000001e-05, "loss": 0.0269, "step": 4923 }, { "epoch": 9.084949215143121, "grad_norm": 0.4717375934123993, "learning_rate": 1.4762999999999999e-05, "loss": 0.0234, "step": 4924 }, { "epoch": 9.08679593721145, "grad_norm": 1.4416847229003906, "learning_rate": 1.4766e-05, "loss": 0.0359, "step": 4925 }, { "epoch": 9.088642659279778, "grad_norm": 0.8894223570823669, "learning_rate": 1.4769e-05, "loss": 0.0437, "step": 4926 }, { "epoch": 9.090489381348107, "grad_norm": 0.4688844382762909, "learning_rate": 1.4772e-05, "loss": 0.0261, "step": 4927 }, { "epoch": 9.092336103416436, "grad_norm": 0.5282978415489197, "learning_rate": 1.4775e-05, "loss": 0.0194, "step": 4928 }, { "epoch": 9.094182825484765, "grad_norm": 0.9984002113342285, "learning_rate": 1.4778e-05, "loss": 0.3795, "step": 4929 }, { "epoch": 9.096029547553094, "grad_norm": 0.9583333134651184, "learning_rate": 1.4781000000000001e-05, "loss": 0.2978, "step": 4930 }, { "epoch": 9.097876269621421, "grad_norm": 0.9209781885147095, "learning_rate": 1.4784000000000001e-05, "loss": 0.2958, "step": 4931 }, { "epoch": 9.09972299168975, "grad_norm": 0.6429269313812256, "learning_rate": 1.4787000000000001e-05, "loss": 0.1974, "step": 4932 }, { "epoch": 9.10156971375808, "grad_norm": 0.8534770607948303, "learning_rate": 1.479e-05, "loss": 0.2142, "step": 4933 }, { "epoch": 
9.103416435826409, "grad_norm": 0.6178396344184875, "learning_rate": 1.4793e-05, "loss": 0.1418, "step": 4934 }, { "epoch": 9.105263157894736, "grad_norm": 0.5363320708274841, "learning_rate": 1.4796000000000002e-05, "loss": 0.0966, "step": 4935 }, { "epoch": 9.107109879963065, "grad_norm": 0.754754364490509, "learning_rate": 1.4799e-05, "loss": 0.1319, "step": 4936 }, { "epoch": 9.108956602031395, "grad_norm": 0.7564411759376526, "learning_rate": 1.4802e-05, "loss": 0.1429, "step": 4937 }, { "epoch": 9.110803324099724, "grad_norm": 1.0024757385253906, "learning_rate": 1.4805e-05, "loss": 0.1361, "step": 4938 }, { "epoch": 9.112650046168051, "grad_norm": 1.137149691581726, "learning_rate": 1.4808e-05, "loss": 0.0977, "step": 4939 }, { "epoch": 9.11449676823638, "grad_norm": 0.846430242061615, "learning_rate": 1.4811000000000001e-05, "loss": 0.104, "step": 4940 }, { "epoch": 9.11634349030471, "grad_norm": 0.9774792194366455, "learning_rate": 1.4814e-05, "loss": 0.0959, "step": 4941 }, { "epoch": 9.118190212373039, "grad_norm": 0.5531234741210938, "learning_rate": 1.4817e-05, "loss": 0.0644, "step": 4942 }, { "epoch": 9.120036934441366, "grad_norm": 0.7496355175971985, "learning_rate": 1.482e-05, "loss": 0.0717, "step": 4943 }, { "epoch": 9.121883656509695, "grad_norm": 0.5128370523452759, "learning_rate": 1.4823e-05, "loss": 0.0551, "step": 4944 }, { "epoch": 9.123730378578024, "grad_norm": 0.3992457389831543, "learning_rate": 1.4826e-05, "loss": 0.0504, "step": 4945 }, { "epoch": 9.125577100646353, "grad_norm": 0.5101442337036133, "learning_rate": 1.4829000000000002e-05, "loss": 0.0348, "step": 4946 }, { "epoch": 9.12742382271468, "grad_norm": 0.439591646194458, "learning_rate": 1.4832000000000001e-05, "loss": 0.0311, "step": 4947 }, { "epoch": 9.12927054478301, "grad_norm": 0.38307738304138184, "learning_rate": 1.4835e-05, "loss": 0.0277, "step": 4948 }, { "epoch": 9.131117266851339, "grad_norm": 0.49682798981666565, "learning_rate": 1.4838e-05, "loss": 0.032, 
"step": 4949 }, { "epoch": 9.132963988919668, "grad_norm": 0.4077721834182739, "learning_rate": 1.4840999999999999e-05, "loss": 0.0248, "step": 4950 }, { "epoch": 9.134810710987995, "grad_norm": 0.5288215279579163, "learning_rate": 1.4844e-05, "loss": 0.039, "step": 4951 }, { "epoch": 9.136657433056325, "grad_norm": 0.49088090658187866, "learning_rate": 1.4847e-05, "loss": 0.0323, "step": 4952 }, { "epoch": 9.138504155124654, "grad_norm": 0.45848211646080017, "learning_rate": 1.485e-05, "loss": 0.0203, "step": 4953 }, { "epoch": 9.140350877192983, "grad_norm": 0.7101696133613586, "learning_rate": 1.4853e-05, "loss": 0.027, "step": 4954 }, { "epoch": 9.142197599261312, "grad_norm": 0.31752264499664307, "learning_rate": 1.4856e-05, "loss": 0.0163, "step": 4955 }, { "epoch": 9.14404432132964, "grad_norm": 0.41779816150665283, "learning_rate": 1.4859000000000001e-05, "loss": 0.0195, "step": 4956 }, { "epoch": 9.145891043397969, "grad_norm": 0.36568403244018555, "learning_rate": 1.4862000000000001e-05, "loss": 0.0212, "step": 4957 }, { "epoch": 9.147737765466298, "grad_norm": 0.7210214734077454, "learning_rate": 1.4865e-05, "loss": 0.0293, "step": 4958 }, { "epoch": 9.149584487534627, "grad_norm": 0.30898305773735046, "learning_rate": 1.4868e-05, "loss": 0.0164, "step": 4959 }, { "epoch": 9.151431209602954, "grad_norm": 0.46881088614463806, "learning_rate": 1.4871e-05, "loss": 0.0285, "step": 4960 }, { "epoch": 9.153277931671283, "grad_norm": 0.4806945025920868, "learning_rate": 1.4874e-05, "loss": 0.0196, "step": 4961 }, { "epoch": 9.155124653739612, "grad_norm": 0.7896641492843628, "learning_rate": 1.4877e-05, "loss": 0.0395, "step": 4962 }, { "epoch": 9.156971375807942, "grad_norm": 0.4346265196800232, "learning_rate": 1.488e-05, "loss": 0.0285, "step": 4963 }, { "epoch": 9.158818097876269, "grad_norm": 0.5687569379806519, "learning_rate": 1.4883e-05, "loss": 0.0194, "step": 4964 }, { "epoch": 9.160664819944598, "grad_norm": 0.6780779361724854, "learning_rate": 
1.4886e-05, "loss": 0.034, "step": 4965 }, { "epoch": 9.162511542012927, "grad_norm": 0.4466516673564911, "learning_rate": 1.4889000000000001e-05, "loss": 0.0194, "step": 4966 }, { "epoch": 9.164358264081256, "grad_norm": 0.8937951922416687, "learning_rate": 1.4892e-05, "loss": 0.0461, "step": 4967 }, { "epoch": 9.166204986149584, "grad_norm": 0.43590834736824036, "learning_rate": 1.4895e-05, "loss": 0.0257, "step": 4968 }, { "epoch": 9.168051708217913, "grad_norm": 0.6083908677101135, "learning_rate": 1.4898e-05, "loss": 0.0324, "step": 4969 }, { "epoch": 9.169898430286242, "grad_norm": 0.36719781160354614, "learning_rate": 1.4901e-05, "loss": 0.0204, "step": 4970 }, { "epoch": 9.171745152354571, "grad_norm": 0.6315568089485168, "learning_rate": 1.4904000000000002e-05, "loss": 0.0361, "step": 4971 }, { "epoch": 9.173591874422899, "grad_norm": 0.48444539308547974, "learning_rate": 1.4907000000000001e-05, "loss": 0.0197, "step": 4972 }, { "epoch": 9.175438596491228, "grad_norm": 0.7261915802955627, "learning_rate": 1.491e-05, "loss": 0.0218, "step": 4973 }, { "epoch": 9.177285318559557, "grad_norm": 0.6012871861457825, "learning_rate": 1.4913e-05, "loss": 0.0241, "step": 4974 }, { "epoch": 9.179132040627886, "grad_norm": 0.42087167501449585, "learning_rate": 1.4915999999999999e-05, "loss": 0.0163, "step": 4975 }, { "epoch": 9.180978762696213, "grad_norm": 0.5307780504226685, "learning_rate": 1.4919e-05, "loss": 0.0207, "step": 4976 }, { "epoch": 9.182825484764543, "grad_norm": 0.5719494819641113, "learning_rate": 1.4922e-05, "loss": 0.0255, "step": 4977 }, { "epoch": 9.184672206832872, "grad_norm": 0.8112320303916931, "learning_rate": 1.4925e-05, "loss": 0.0259, "step": 4978 }, { "epoch": 9.1865189289012, "grad_norm": 0.9635986685752869, "learning_rate": 1.4928e-05, "loss": 0.2401, "step": 4979 }, { "epoch": 9.18836565096953, "grad_norm": 0.8502519726753235, "learning_rate": 1.4931e-05, "loss": 0.247, "step": 4980 }, { "epoch": 9.190212373037857, "grad_norm": 
0.8046050667762756, "learning_rate": 1.4934000000000001e-05, "loss": 0.2011, "step": 4981 }, { "epoch": 9.192059095106186, "grad_norm": 0.6960356831550598, "learning_rate": 1.4937000000000001e-05, "loss": 0.1636, "step": 4982 }, { "epoch": 9.193905817174516, "grad_norm": 0.73581862449646, "learning_rate": 1.4940000000000001e-05, "loss": 0.144, "step": 4983 }, { "epoch": 9.195752539242845, "grad_norm": 0.6557340621948242, "learning_rate": 1.4943e-05, "loss": 0.1418, "step": 4984 }, { "epoch": 9.197599261311172, "grad_norm": 0.6575548648834229, "learning_rate": 1.4945999999999999e-05, "loss": 0.1428, "step": 4985 }, { "epoch": 9.199445983379501, "grad_norm": 0.5466626882553101, "learning_rate": 1.4949e-05, "loss": 0.1301, "step": 4986 }, { "epoch": 9.20129270544783, "grad_norm": 0.5372762084007263, "learning_rate": 1.4952e-05, "loss": 0.112, "step": 4987 }, { "epoch": 9.20313942751616, "grad_norm": 0.9452289938926697, "learning_rate": 1.4955e-05, "loss": 0.1621, "step": 4988 }, { "epoch": 9.204986149584487, "grad_norm": 0.7675982713699341, "learning_rate": 1.4958e-05, "loss": 0.0883, "step": 4989 }, { "epoch": 9.206832871652816, "grad_norm": 0.41777050495147705, "learning_rate": 1.4961e-05, "loss": 0.0768, "step": 4990 }, { "epoch": 9.208679593721145, "grad_norm": 0.6762139797210693, "learning_rate": 1.4964000000000001e-05, "loss": 0.0669, "step": 4991 }, { "epoch": 9.210526315789474, "grad_norm": 0.6928996443748474, "learning_rate": 1.4967000000000001e-05, "loss": 0.0477, "step": 4992 }, { "epoch": 9.212373037857802, "grad_norm": 0.4634210169315338, "learning_rate": 1.497e-05, "loss": 0.055, "step": 4993 }, { "epoch": 9.21421975992613, "grad_norm": 0.3059404492378235, "learning_rate": 1.4973e-05, "loss": 0.0285, "step": 4994 }, { "epoch": 9.21606648199446, "grad_norm": 0.42111411690711975, "learning_rate": 1.4976e-05, "loss": 0.052, "step": 4995 }, { "epoch": 9.21791320406279, "grad_norm": 0.8493423461914062, "learning_rate": 1.4979000000000002e-05, "loss": 0.0459, 
"step": 4996 }, { "epoch": 9.219759926131117, "grad_norm": 0.6417502164840698, "learning_rate": 1.4982e-05, "loss": 0.0637, "step": 4997 }, { "epoch": 9.221606648199446, "grad_norm": 0.4340676963329315, "learning_rate": 1.4985e-05, "loss": 0.0339, "step": 4998 }, { "epoch": 9.223453370267775, "grad_norm": 0.47145336866378784, "learning_rate": 1.4988e-05, "loss": 0.0307, "step": 4999 }, { "epoch": 9.225300092336104, "grad_norm": 0.5584102869033813, "learning_rate": 1.4991e-05, "loss": 0.0217, "step": 5000 }, { "epoch": 9.225300092336104, "eval_cer": 0.12315628483293153, "eval_loss": 0.34040430188179016, "eval_runtime": 16.1361, "eval_samples_per_second": 18.84, "eval_steps_per_second": 0.62, "eval_wer": 0.4468534151957022, "step": 5000 }, { "epoch": 9.227146814404431, "grad_norm": 0.4516844153404236, "learning_rate": 1.4994e-05, "loss": 0.0291, "step": 5001 }, { "epoch": 9.22899353647276, "grad_norm": 0.40577274560928345, "learning_rate": 1.4997e-05, "loss": 0.0299, "step": 5002 }, { "epoch": 9.23084025854109, "grad_norm": 0.6855767965316772, "learning_rate": 1.5e-05, "loss": 0.0643, "step": 5003 }, { "epoch": 9.232686980609419, "grad_norm": 0.4035259187221527, "learning_rate": 1.5003e-05, "loss": 0.0304, "step": 5004 }, { "epoch": 9.234533702677748, "grad_norm": 0.4719028174877167, "learning_rate": 1.5006e-05, "loss": 0.0247, "step": 5005 }, { "epoch": 9.236380424746075, "grad_norm": 0.4207598865032196, "learning_rate": 1.5009e-05, "loss": 0.02, "step": 5006 }, { "epoch": 9.238227146814404, "grad_norm": 0.34324145317077637, "learning_rate": 1.5012e-05, "loss": 0.0279, "step": 5007 }, { "epoch": 9.240073868882734, "grad_norm": 0.6258143782615662, "learning_rate": 1.5015e-05, "loss": 0.0685, "step": 5008 }, { "epoch": 9.241920590951063, "grad_norm": 0.6973277926445007, "learning_rate": 1.5018000000000001e-05, "loss": 0.0403, "step": 5009 }, { "epoch": 9.24376731301939, "grad_norm": 0.4057953655719757, "learning_rate": 1.5021e-05, "loss": 0.0283, "step": 5010 }, { 
"epoch": 9.24561403508772, "grad_norm": 0.42994633316993713, "learning_rate": 1.5024e-05, "loss": 0.0214, "step": 5011 }, { "epoch": 9.247460757156048, "grad_norm": 0.5259776711463928, "learning_rate": 1.5027e-05, "loss": 0.0324, "step": 5012 }, { "epoch": 9.249307479224377, "grad_norm": 0.5950026512145996, "learning_rate": 1.503e-05, "loss": 0.0259, "step": 5013 }, { "epoch": 9.251154201292705, "grad_norm": 0.3533867597579956, "learning_rate": 1.5033e-05, "loss": 0.0127, "step": 5014 }, { "epoch": 9.253000923361034, "grad_norm": 0.5182950496673584, "learning_rate": 1.5036e-05, "loss": 0.025, "step": 5015 }, { "epoch": 9.254847645429363, "grad_norm": 0.9183833599090576, "learning_rate": 1.5039e-05, "loss": 0.0226, "step": 5016 }, { "epoch": 9.256694367497692, "grad_norm": 0.35641777515411377, "learning_rate": 1.5042e-05, "loss": 0.0186, "step": 5017 }, { "epoch": 9.25854108956602, "grad_norm": 0.6755897402763367, "learning_rate": 1.5044999999999999e-05, "loss": 0.0291, "step": 5018 }, { "epoch": 9.260387811634349, "grad_norm": 0.5879606008529663, "learning_rate": 1.5048000000000002e-05, "loss": 0.0334, "step": 5019 }, { "epoch": 9.262234533702678, "grad_norm": 0.5946062207221985, "learning_rate": 1.5051000000000002e-05, "loss": 0.0323, "step": 5020 }, { "epoch": 9.264081255771007, "grad_norm": 0.8611308932304382, "learning_rate": 1.5054000000000002e-05, "loss": 0.0325, "step": 5021 }, { "epoch": 9.265927977839334, "grad_norm": 0.6337459683418274, "learning_rate": 1.5057e-05, "loss": 0.0278, "step": 5022 }, { "epoch": 9.267774699907664, "grad_norm": 0.7506858706474304, "learning_rate": 1.506e-05, "loss": 0.0377, "step": 5023 }, { "epoch": 9.269621421975993, "grad_norm": 0.7468025088310242, "learning_rate": 1.5063e-05, "loss": 0.036, "step": 5024 }, { "epoch": 9.271468144044322, "grad_norm": 0.5505267977714539, "learning_rate": 1.5066e-05, "loss": 0.0298, "step": 5025 }, { "epoch": 9.27331486611265, "grad_norm": 0.43939507007598877, "learning_rate": 1.5069e-05, 
"loss": 0.0177, "step": 5026 }, { "epoch": 9.275161588180978, "grad_norm": 1.4087032079696655, "learning_rate": 1.5071999999999999e-05, "loss": 0.0352, "step": 5027 }, { "epoch": 9.277008310249307, "grad_norm": 0.6867530345916748, "learning_rate": 1.5074999999999999e-05, "loss": 0.0324, "step": 5028 }, { "epoch": 9.278855032317637, "grad_norm": 1.1742939949035645, "learning_rate": 1.5078000000000002e-05, "loss": 0.3063, "step": 5029 }, { "epoch": 9.280701754385966, "grad_norm": 0.9454092979431152, "learning_rate": 1.5081000000000002e-05, "loss": 0.2511, "step": 5030 }, { "epoch": 9.282548476454293, "grad_norm": 1.0500773191452026, "learning_rate": 1.5084000000000002e-05, "loss": 0.2473, "step": 5031 }, { "epoch": 9.284395198522622, "grad_norm": 1.6689544916152954, "learning_rate": 1.5087000000000001e-05, "loss": 0.2361, "step": 5032 }, { "epoch": 9.286241920590951, "grad_norm": 0.8237836360931396, "learning_rate": 1.5090000000000001e-05, "loss": 0.1845, "step": 5033 }, { "epoch": 9.28808864265928, "grad_norm": 0.7189024686813354, "learning_rate": 1.5093e-05, "loss": 0.1309, "step": 5034 }, { "epoch": 9.289935364727608, "grad_norm": 0.8112640976905823, "learning_rate": 1.5095999999999999e-05, "loss": 0.1582, "step": 5035 }, { "epoch": 9.291782086795937, "grad_norm": 0.6921137571334839, "learning_rate": 1.5098999999999999e-05, "loss": 0.1338, "step": 5036 }, { "epoch": 9.293628808864266, "grad_norm": 0.7306265830993652, "learning_rate": 1.5101999999999999e-05, "loss": 0.148, "step": 5037 }, { "epoch": 9.295475530932595, "grad_norm": 0.6113399863243103, "learning_rate": 1.5104999999999999e-05, "loss": 0.0967, "step": 5038 }, { "epoch": 9.297322253000923, "grad_norm": 0.5886443853378296, "learning_rate": 1.5108000000000002e-05, "loss": 0.1014, "step": 5039 }, { "epoch": 9.299168975069252, "grad_norm": 0.617939293384552, "learning_rate": 1.5111000000000002e-05, "loss": 0.1227, "step": 5040 }, { "epoch": 9.301015697137581, "grad_norm": 0.7025123238563538, 
"learning_rate": 1.5114000000000001e-05, "loss": 0.0787, "step": 5041 }, { "epoch": 9.30286241920591, "grad_norm": 0.34799450635910034, "learning_rate": 1.5117000000000001e-05, "loss": 0.0315, "step": 5042 }, { "epoch": 9.304709141274238, "grad_norm": 0.5829216241836548, "learning_rate": 1.5120000000000001e-05, "loss": 0.0549, "step": 5043 }, { "epoch": 9.306555863342567, "grad_norm": 0.630823016166687, "learning_rate": 1.5123e-05, "loss": 0.0816, "step": 5044 }, { "epoch": 9.308402585410896, "grad_norm": 0.6002022624015808, "learning_rate": 1.5126e-05, "loss": 0.0489, "step": 5045 }, { "epoch": 9.310249307479225, "grad_norm": 0.6191938519477844, "learning_rate": 1.5129e-05, "loss": 0.0354, "step": 5046 }, { "epoch": 9.312096029547552, "grad_norm": 0.45328018069267273, "learning_rate": 1.5131999999999998e-05, "loss": 0.039, "step": 5047 }, { "epoch": 9.313942751615881, "grad_norm": 0.8805258274078369, "learning_rate": 1.5134999999999998e-05, "loss": 0.0366, "step": 5048 }, { "epoch": 9.31578947368421, "grad_norm": 0.7245248556137085, "learning_rate": 1.5138000000000001e-05, "loss": 0.0715, "step": 5049 }, { "epoch": 9.31763619575254, "grad_norm": 0.5289453864097595, "learning_rate": 1.5141000000000001e-05, "loss": 0.0369, "step": 5050 }, { "epoch": 9.319482917820867, "grad_norm": 0.6848105192184448, "learning_rate": 1.5144000000000001e-05, "loss": 0.0288, "step": 5051 }, { "epoch": 9.321329639889196, "grad_norm": 0.41027796268463135, "learning_rate": 1.5147e-05, "loss": 0.0156, "step": 5052 }, { "epoch": 9.323176361957525, "grad_norm": 0.36490383744239807, "learning_rate": 1.515e-05, "loss": 0.0261, "step": 5053 }, { "epoch": 9.325023084025855, "grad_norm": 0.34889253973960876, "learning_rate": 1.5153e-05, "loss": 0.0243, "step": 5054 }, { "epoch": 9.326869806094184, "grad_norm": 0.6471371054649353, "learning_rate": 1.5156e-05, "loss": 0.0477, "step": 5055 }, { "epoch": 9.328716528162511, "grad_norm": 0.4622030556201935, "learning_rate": 1.5159e-05, "loss": 0.0277, 
"step": 5056 }, { "epoch": 9.33056325023084, "grad_norm": 0.5205875635147095, "learning_rate": 1.5162e-05, "loss": 0.0272, "step": 5057 }, { "epoch": 9.33240997229917, "grad_norm": 0.35904061794281006, "learning_rate": 1.5165e-05, "loss": 0.0163, "step": 5058 }, { "epoch": 9.334256694367498, "grad_norm": 0.6207438707351685, "learning_rate": 1.5168000000000001e-05, "loss": 0.0328, "step": 5059 }, { "epoch": 9.336103416435826, "grad_norm": 0.7448795437812805, "learning_rate": 1.5171000000000001e-05, "loss": 0.0255, "step": 5060 }, { "epoch": 9.337950138504155, "grad_norm": 0.5210355520248413, "learning_rate": 1.5174e-05, "loss": 0.0203, "step": 5061 }, { "epoch": 9.339796860572484, "grad_norm": 0.474969744682312, "learning_rate": 1.5177e-05, "loss": 0.0191, "step": 5062 }, { "epoch": 9.341643582640813, "grad_norm": 0.40252211689949036, "learning_rate": 1.518e-05, "loss": 0.0244, "step": 5063 }, { "epoch": 9.34349030470914, "grad_norm": 0.7463494539260864, "learning_rate": 1.5183e-05, "loss": 0.0339, "step": 5064 }, { "epoch": 9.34533702677747, "grad_norm": 0.6289669275283813, "learning_rate": 1.5186e-05, "loss": 0.0395, "step": 5065 }, { "epoch": 9.347183748845799, "grad_norm": 0.432212769985199, "learning_rate": 1.5189e-05, "loss": 0.0272, "step": 5066 }, { "epoch": 9.349030470914128, "grad_norm": 0.6764055490493774, "learning_rate": 1.5192e-05, "loss": 0.0319, "step": 5067 }, { "epoch": 9.350877192982455, "grad_norm": 0.6974257230758667, "learning_rate": 1.5195e-05, "loss": 0.0491, "step": 5068 }, { "epoch": 9.352723915050785, "grad_norm": 0.46298375725746155, "learning_rate": 1.5198000000000003e-05, "loss": 0.0181, "step": 5069 }, { "epoch": 9.354570637119114, "grad_norm": 0.5116629004478455, "learning_rate": 1.5201000000000002e-05, "loss": 0.0245, "step": 5070 }, { "epoch": 9.356417359187443, "grad_norm": 0.6362098455429077, "learning_rate": 1.5204e-05, "loss": 0.0248, "step": 5071 }, { "epoch": 9.35826408125577, "grad_norm": 0.768278181552887, "learning_rate": 
1.5207e-05, "loss": 0.0259, "step": 5072 }, { "epoch": 9.3601108033241, "grad_norm": 0.42540857195854187, "learning_rate": 1.521e-05, "loss": 0.0279, "step": 5073 }, { "epoch": 9.361957525392429, "grad_norm": 0.5822314023971558, "learning_rate": 1.5213e-05, "loss": 0.0318, "step": 5074 }, { "epoch": 9.363804247460758, "grad_norm": 0.5463801026344299, "learning_rate": 1.5216e-05, "loss": 0.032, "step": 5075 }, { "epoch": 9.365650969529085, "grad_norm": 0.7111003994941711, "learning_rate": 1.5219e-05, "loss": 0.0346, "step": 5076 }, { "epoch": 9.367497691597414, "grad_norm": 0.8963141441345215, "learning_rate": 1.5222e-05, "loss": 0.0408, "step": 5077 }, { "epoch": 9.369344413665743, "grad_norm": 1.1584186553955078, "learning_rate": 1.5224999999999999e-05, "loss": 0.0367, "step": 5078 }, { "epoch": 9.371191135734072, "grad_norm": 1.1298359632492065, "learning_rate": 1.5228000000000002e-05, "loss": 0.3049, "step": 5079 }, { "epoch": 9.373037857802402, "grad_norm": 1.229810357093811, "learning_rate": 1.5231000000000002e-05, "loss": 0.3381, "step": 5080 }, { "epoch": 9.374884579870729, "grad_norm": 0.6872143745422363, "learning_rate": 1.5234000000000002e-05, "loss": 0.1857, "step": 5081 }, { "epoch": 9.376731301939058, "grad_norm": 0.7117223143577576, "learning_rate": 1.5237000000000002e-05, "loss": 0.1947, "step": 5082 }, { "epoch": 9.378578024007387, "grad_norm": 0.6277230978012085, "learning_rate": 1.524e-05, "loss": 0.1643, "step": 5083 }, { "epoch": 9.380424746075716, "grad_norm": 0.951104998588562, "learning_rate": 1.5243e-05, "loss": 0.2928, "step": 5084 }, { "epoch": 9.382271468144044, "grad_norm": 1.0074466466903687, "learning_rate": 1.5246e-05, "loss": 0.1422, "step": 5085 }, { "epoch": 9.384118190212373, "grad_norm": 0.9526377320289612, "learning_rate": 1.5249e-05, "loss": 0.1417, "step": 5086 }, { "epoch": 9.385964912280702, "grad_norm": 0.6263598203659058, "learning_rate": 1.5251999999999999e-05, "loss": 0.1326, "step": 5087 }, { "epoch": 9.387811634349031, 
"grad_norm": 0.7202224731445312, "learning_rate": 1.5254999999999999e-05, "loss": 0.152, "step": 5088 }, { "epoch": 9.389658356417359, "grad_norm": 0.7987571358680725, "learning_rate": 1.5258000000000002e-05, "loss": 0.1059, "step": 5089 }, { "epoch": 9.391505078485688, "grad_norm": 0.8620315790176392, "learning_rate": 1.5261000000000002e-05, "loss": 0.1033, "step": 5090 }, { "epoch": 9.393351800554017, "grad_norm": 0.6651105880737305, "learning_rate": 1.5264e-05, "loss": 0.1379, "step": 5091 }, { "epoch": 9.395198522622346, "grad_norm": 0.8910331130027771, "learning_rate": 1.5267e-05, "loss": 0.091, "step": 5092 }, { "epoch": 9.397045244690673, "grad_norm": 0.7889306545257568, "learning_rate": 1.527e-05, "loss": 0.0956, "step": 5093 }, { "epoch": 9.398891966759003, "grad_norm": 0.45242881774902344, "learning_rate": 1.5273e-05, "loss": 0.0403, "step": 5094 }, { "epoch": 9.400738688827332, "grad_norm": 0.44926634430885315, "learning_rate": 1.5276e-05, "loss": 0.0407, "step": 5095 }, { "epoch": 9.40258541089566, "grad_norm": 0.5183128118515015, "learning_rate": 1.5279e-05, "loss": 0.0396, "step": 5096 }, { "epoch": 9.404432132963988, "grad_norm": 0.7148613333702087, "learning_rate": 1.5282e-05, "loss": 0.0338, "step": 5097 }, { "epoch": 9.406278855032317, "grad_norm": 0.6550760269165039, "learning_rate": 1.5285e-05, "loss": 0.0405, "step": 5098 }, { "epoch": 9.408125577100646, "grad_norm": 0.7605519890785217, "learning_rate": 1.5288000000000003e-05, "loss": 0.0258, "step": 5099 }, { "epoch": 9.409972299168976, "grad_norm": 0.6842150092124939, "learning_rate": 1.5291000000000003e-05, "loss": 0.0257, "step": 5100 }, { "epoch": 9.411819021237303, "grad_norm": 0.653022050857544, "learning_rate": 1.5294000000000003e-05, "loss": 0.0464, "step": 5101 }, { "epoch": 9.413665743305632, "grad_norm": 0.610390841960907, "learning_rate": 1.5297e-05, "loss": 0.0324, "step": 5102 }, { "epoch": 9.415512465373961, "grad_norm": 0.4467082619667053, "learning_rate": 1.53e-05, "loss": 
0.0303, "step": 5103 }, { "epoch": 9.41735918744229, "grad_norm": 0.7387998700141907, "learning_rate": 1.5303e-05, "loss": 0.0335, "step": 5104 }, { "epoch": 9.41920590951062, "grad_norm": 0.4913165271282196, "learning_rate": 1.5306e-05, "loss": 0.0213, "step": 5105 }, { "epoch": 9.421052631578947, "grad_norm": 0.8983688950538635, "learning_rate": 1.5309e-05, "loss": 0.0344, "step": 5106 }, { "epoch": 9.422899353647276, "grad_norm": 0.8142847418785095, "learning_rate": 1.5312e-05, "loss": 0.0212, "step": 5107 }, { "epoch": 9.424746075715605, "grad_norm": 0.7110851407051086, "learning_rate": 1.5314999999999998e-05, "loss": 0.0421, "step": 5108 }, { "epoch": 9.426592797783934, "grad_norm": 0.5607290267944336, "learning_rate": 1.5318e-05, "loss": 0.0224, "step": 5109 }, { "epoch": 9.428439519852262, "grad_norm": 0.4886728525161743, "learning_rate": 1.5321e-05, "loss": 0.0299, "step": 5110 }, { "epoch": 9.43028624192059, "grad_norm": 0.5855394005775452, "learning_rate": 1.5324e-05, "loss": 0.0333, "step": 5111 }, { "epoch": 9.43213296398892, "grad_norm": 0.7737128138542175, "learning_rate": 1.5327e-05, "loss": 0.0223, "step": 5112 }, { "epoch": 9.43397968605725, "grad_norm": 0.5125995874404907, "learning_rate": 1.533e-05, "loss": 0.0392, "step": 5113 }, { "epoch": 9.435826408125576, "grad_norm": 0.42056554555892944, "learning_rate": 1.5333e-05, "loss": 0.0208, "step": 5114 }, { "epoch": 9.437673130193906, "grad_norm": 0.7700082659721375, "learning_rate": 1.5336e-05, "loss": 0.0213, "step": 5115 }, { "epoch": 9.439519852262235, "grad_norm": 0.6500101685523987, "learning_rate": 1.5339e-05, "loss": 0.0304, "step": 5116 }, { "epoch": 9.441366574330564, "grad_norm": 0.7322611212730408, "learning_rate": 1.5342e-05, "loss": 0.043, "step": 5117 }, { "epoch": 9.443213296398891, "grad_norm": 1.0362110137939453, "learning_rate": 1.5345e-05, "loss": 0.034, "step": 5118 }, { "epoch": 9.44506001846722, "grad_norm": 0.7446939945220947, "learning_rate": 1.5348000000000003e-05, "loss": 
0.0349, "step": 5119 }, { "epoch": 9.44690674053555, "grad_norm": 0.5886105298995972, "learning_rate": 1.5351000000000003e-05, "loss": 0.0264, "step": 5120 }, { "epoch": 9.448753462603879, "grad_norm": 0.48578187823295593, "learning_rate": 1.5354000000000002e-05, "loss": 0.0172, "step": 5121 }, { "epoch": 9.450600184672206, "grad_norm": 0.688308835029602, "learning_rate": 1.5357000000000002e-05, "loss": 0.0264, "step": 5122 }, { "epoch": 9.452446906740535, "grad_norm": 0.5412452816963196, "learning_rate": 1.5360000000000002e-05, "loss": 0.0138, "step": 5123 }, { "epoch": 9.454293628808864, "grad_norm": 0.7046781778335571, "learning_rate": 1.5363000000000002e-05, "loss": 0.0227, "step": 5124 }, { "epoch": 9.456140350877194, "grad_norm": 0.6773200631141663, "learning_rate": 1.5366e-05, "loss": 0.0427, "step": 5125 }, { "epoch": 9.45798707294552, "grad_norm": 1.3261581659317017, "learning_rate": 1.5368999999999998e-05, "loss": 0.0634, "step": 5126 }, { "epoch": 9.45983379501385, "grad_norm": 0.5345895886421204, "learning_rate": 1.5371999999999998e-05, "loss": 0.0376, "step": 5127 }, { "epoch": 9.46168051708218, "grad_norm": 0.697981595993042, "learning_rate": 1.5374999999999998e-05, "loss": 0.0487, "step": 5128 }, { "epoch": 9.463527239150508, "grad_norm": 0.8647690415382385, "learning_rate": 1.5377999999999997e-05, "loss": 0.2722, "step": 5129 }, { "epoch": 9.465373961218837, "grad_norm": 0.9083550572395325, "learning_rate": 1.5381e-05, "loss": 0.1937, "step": 5130 }, { "epoch": 9.467220683287165, "grad_norm": 0.7926074862480164, "learning_rate": 1.5384e-05, "loss": 0.2162, "step": 5131 }, { "epoch": 9.469067405355494, "grad_norm": 0.7760337591171265, "learning_rate": 1.5387e-05, "loss": 0.2132, "step": 5132 }, { "epoch": 9.470914127423823, "grad_norm": 0.8853809237480164, "learning_rate": 1.539e-05, "loss": 0.1809, "step": 5133 }, { "epoch": 9.472760849492152, "grad_norm": 1.001965045928955, "learning_rate": 1.5393e-05, "loss": 0.1805, "step": 5134 }, { "epoch": 
9.47460757156048, "grad_norm": 1.8056459426879883, "learning_rate": 1.5396e-05, "loss": 0.1188, "step": 5135 }, { "epoch": 9.476454293628809, "grad_norm": 1.0790168046951294, "learning_rate": 1.5399e-05, "loss": 0.156, "step": 5136 }, { "epoch": 9.478301015697138, "grad_norm": 0.7434194087982178, "learning_rate": 1.5402e-05, "loss": 0.0947, "step": 5137 }, { "epoch": 9.480147737765467, "grad_norm": 0.6929402351379395, "learning_rate": 1.5405e-05, "loss": 0.0839, "step": 5138 }, { "epoch": 9.481994459833794, "grad_norm": 0.8843145966529846, "learning_rate": 1.5408e-05, "loss": 0.1342, "step": 5139 }, { "epoch": 9.483841181902124, "grad_norm": 0.37170612812042236, "learning_rate": 1.5411000000000002e-05, "loss": 0.0424, "step": 5140 }, { "epoch": 9.485687903970453, "grad_norm": 0.4109022617340088, "learning_rate": 1.5414000000000002e-05, "loss": 0.0548, "step": 5141 }, { "epoch": 9.487534626038782, "grad_norm": 0.4736909568309784, "learning_rate": 1.5417e-05, "loss": 0.0587, "step": 5142 }, { "epoch": 9.48938134810711, "grad_norm": 0.49091270565986633, "learning_rate": 1.542e-05, "loss": 0.0496, "step": 5143 }, { "epoch": 9.491228070175438, "grad_norm": 0.5085435509681702, "learning_rate": 1.5423e-05, "loss": 0.0316, "step": 5144 }, { "epoch": 9.493074792243767, "grad_norm": 0.350394606590271, "learning_rate": 1.5426e-05, "loss": 0.0277, "step": 5145 }, { "epoch": 9.494921514312097, "grad_norm": 0.6892826557159424, "learning_rate": 1.5429e-05, "loss": 0.0322, "step": 5146 }, { "epoch": 9.496768236380424, "grad_norm": 0.6446825861930847, "learning_rate": 1.5432e-05, "loss": 0.0246, "step": 5147 }, { "epoch": 9.498614958448753, "grad_norm": 0.79939204454422, "learning_rate": 1.5435e-05, "loss": 0.0732, "step": 5148 }, { "epoch": 9.500461680517082, "grad_norm": 0.6500365734100342, "learning_rate": 1.5438e-05, "loss": 0.0336, "step": 5149 }, { "epoch": 9.502308402585411, "grad_norm": 0.5178731679916382, "learning_rate": 1.5441000000000003e-05, "loss": 0.0348, "step": 
5150 }, { "epoch": 9.504155124653739, "grad_norm": 0.3989321291446686, "learning_rate": 1.5444e-05, "loss": 0.018, "step": 5151 }, { "epoch": 9.506001846722068, "grad_norm": 0.9351484775543213, "learning_rate": 1.5447e-05, "loss": 0.0313, "step": 5152 }, { "epoch": 9.507848568790397, "grad_norm": 0.5877683162689209, "learning_rate": 1.545e-05, "loss": 0.0372, "step": 5153 }, { "epoch": 9.509695290858726, "grad_norm": 0.46480149030685425, "learning_rate": 1.5453e-05, "loss": 0.025, "step": 5154 }, { "epoch": 9.511542012927055, "grad_norm": 0.7051569223403931, "learning_rate": 1.5456e-05, "loss": 0.0375, "step": 5155 }, { "epoch": 9.513388734995383, "grad_norm": 0.5951721668243408, "learning_rate": 1.5459e-05, "loss": 0.0347, "step": 5156 }, { "epoch": 9.515235457063712, "grad_norm": 0.671683669090271, "learning_rate": 1.5462e-05, "loss": 0.0437, "step": 5157 }, { "epoch": 9.517082179132041, "grad_norm": 0.6410195231437683, "learning_rate": 1.5465e-05, "loss": 0.0548, "step": 5158 }, { "epoch": 9.51892890120037, "grad_norm": 0.5157730579376221, "learning_rate": 1.5467999999999998e-05, "loss": 0.0316, "step": 5159 }, { "epoch": 9.520775623268698, "grad_norm": 0.5009844303131104, "learning_rate": 1.5471e-05, "loss": 0.0218, "step": 5160 }, { "epoch": 9.522622345337027, "grad_norm": 0.39921513199806213, "learning_rate": 1.5474e-05, "loss": 0.0157, "step": 5161 }, { "epoch": 9.524469067405356, "grad_norm": 0.5840036273002625, "learning_rate": 1.5477e-05, "loss": 0.0308, "step": 5162 }, { "epoch": 9.526315789473685, "grad_norm": 0.5300602912902832, "learning_rate": 1.548e-05, "loss": 0.0255, "step": 5163 }, { "epoch": 9.528162511542012, "grad_norm": 0.5056509971618652, "learning_rate": 1.5483e-05, "loss": 0.0343, "step": 5164 }, { "epoch": 9.530009233610341, "grad_norm": 0.4027712941169739, "learning_rate": 1.5486e-05, "loss": 0.0291, "step": 5165 }, { "epoch": 9.53185595567867, "grad_norm": 0.46333715319633484, "learning_rate": 1.5489e-05, "loss": 0.0233, "step": 5166 }, 
{ "epoch": 9.533702677747, "grad_norm": 0.651351273059845, "learning_rate": 1.5492e-05, "loss": 0.0306, "step": 5167 }, { "epoch": 9.535549399815327, "grad_norm": 0.5864402055740356, "learning_rate": 1.5495e-05, "loss": 0.023, "step": 5168 }, { "epoch": 9.537396121883656, "grad_norm": 0.5296105742454529, "learning_rate": 1.5498e-05, "loss": 0.0232, "step": 5169 }, { "epoch": 9.539242843951985, "grad_norm": 0.4870733916759491, "learning_rate": 1.5501000000000003e-05, "loss": 0.0235, "step": 5170 }, { "epoch": 9.541089566020315, "grad_norm": 0.5141538977622986, "learning_rate": 1.5504000000000003e-05, "loss": 0.0275, "step": 5171 }, { "epoch": 9.542936288088642, "grad_norm": 0.4707508683204651, "learning_rate": 1.5507000000000002e-05, "loss": 0.0225, "step": 5172 }, { "epoch": 9.544783010156971, "grad_norm": 0.8089979887008667, "learning_rate": 1.5510000000000002e-05, "loss": 0.0361, "step": 5173 }, { "epoch": 9.5466297322253, "grad_norm": 0.5475485920906067, "learning_rate": 1.5513000000000002e-05, "loss": 0.0279, "step": 5174 }, { "epoch": 9.54847645429363, "grad_norm": 2.153735399246216, "learning_rate": 1.5516000000000002e-05, "loss": 0.0351, "step": 5175 }, { "epoch": 9.550323176361957, "grad_norm": 1.0369545221328735, "learning_rate": 1.5518999999999998e-05, "loss": 0.0216, "step": 5176 }, { "epoch": 9.552169898430286, "grad_norm": 0.6523640751838684, "learning_rate": 1.5521999999999998e-05, "loss": 0.0243, "step": 5177 }, { "epoch": 9.554016620498615, "grad_norm": 0.887344479560852, "learning_rate": 1.5524999999999998e-05, "loss": 0.0567, "step": 5178 }, { "epoch": 9.555863342566944, "grad_norm": 1.1368447542190552, "learning_rate": 1.5527999999999998e-05, "loss": 0.2774, "step": 5179 }, { "epoch": 9.557710064635273, "grad_norm": 0.9110034704208374, "learning_rate": 1.5531e-05, "loss": 0.254, "step": 5180 }, { "epoch": 9.5595567867036, "grad_norm": 0.8574900031089783, "learning_rate": 1.5534e-05, "loss": 0.1996, "step": 5181 }, { "epoch": 9.56140350877193, 
"grad_norm": 0.7445184588432312, "learning_rate": 1.5537e-05, "loss": 0.1832, "step": 5182 }, { "epoch": 9.563250230840259, "grad_norm": 1.266960620880127, "learning_rate": 1.554e-05, "loss": 0.2373, "step": 5183 }, { "epoch": 9.565096952908588, "grad_norm": 0.8766116499900818, "learning_rate": 1.5543e-05, "loss": 0.1329, "step": 5184 }, { "epoch": 9.566943674976915, "grad_norm": 1.0444930791854858, "learning_rate": 1.5546e-05, "loss": 0.1283, "step": 5185 }, { "epoch": 9.568790397045245, "grad_norm": 0.7388608455657959, "learning_rate": 1.5549e-05, "loss": 0.1423, "step": 5186 }, { "epoch": 9.570637119113574, "grad_norm": 0.6338157057762146, "learning_rate": 1.5552e-05, "loss": 0.1518, "step": 5187 }, { "epoch": 9.572483841181903, "grad_norm": 0.5952925682067871, "learning_rate": 1.5555e-05, "loss": 0.1124, "step": 5188 }, { "epoch": 9.57433056325023, "grad_norm": 0.7095077037811279, "learning_rate": 1.5558e-05, "loss": 0.1288, "step": 5189 }, { "epoch": 9.57617728531856, "grad_norm": 0.5818418860435486, "learning_rate": 1.5561000000000002e-05, "loss": 0.0931, "step": 5190 }, { "epoch": 9.578024007386889, "grad_norm": 0.8665828704833984, "learning_rate": 1.5564000000000002e-05, "loss": 0.0822, "step": 5191 }, { "epoch": 9.579870729455218, "grad_norm": 0.9548938274383545, "learning_rate": 1.5567000000000002e-05, "loss": 0.0608, "step": 5192 }, { "epoch": 9.581717451523545, "grad_norm": 0.4729214310646057, "learning_rate": 1.5570000000000002e-05, "loss": 0.064, "step": 5193 }, { "epoch": 9.583564173591874, "grad_norm": 0.33552882075309753, "learning_rate": 1.5573e-05, "loss": 0.0273, "step": 5194 }, { "epoch": 9.585410895660203, "grad_norm": 0.8147560358047485, "learning_rate": 1.5576e-05, "loss": 0.0875, "step": 5195 }, { "epoch": 9.587257617728532, "grad_norm": 0.4922485947608948, "learning_rate": 1.5579e-05, "loss": 0.0491, "step": 5196 }, { "epoch": 9.58910433979686, "grad_norm": 3.5064287185668945, "learning_rate": 1.5582e-05, "loss": 0.0954, "step": 5197 }, { 
"epoch": 9.590951061865189, "grad_norm": 0.40575841069221497, "learning_rate": 1.5585e-05, "loss": 0.0368, "step": 5198 }, { "epoch": 9.592797783933518, "grad_norm": 0.425672322511673, "learning_rate": 1.5588e-05, "loss": 0.0279, "step": 5199 }, { "epoch": 9.594644506001847, "grad_norm": 0.4247608780860901, "learning_rate": 1.5591e-05, "loss": 0.0393, "step": 5200 }, { "epoch": 9.596491228070175, "grad_norm": 0.7301235795021057, "learning_rate": 1.5594e-05, "loss": 0.0619, "step": 5201 }, { "epoch": 9.598337950138504, "grad_norm": 0.4044654667377472, "learning_rate": 1.5597e-05, "loss": 0.0289, "step": 5202 }, { "epoch": 9.600184672206833, "grad_norm": 0.36088332533836365, "learning_rate": 1.56e-05, "loss": 0.0129, "step": 5203 }, { "epoch": 9.602031394275162, "grad_norm": 0.6777994632720947, "learning_rate": 1.5603e-05, "loss": 0.0395, "step": 5204 }, { "epoch": 9.603878116343491, "grad_norm": 0.2510547935962677, "learning_rate": 1.5606e-05, "loss": 0.0172, "step": 5205 }, { "epoch": 9.605724838411819, "grad_norm": 0.5264256596565247, "learning_rate": 1.5609e-05, "loss": 0.0282, "step": 5206 }, { "epoch": 9.607571560480148, "grad_norm": 0.5090929865837097, "learning_rate": 1.5612e-05, "loss": 0.0206, "step": 5207 }, { "epoch": 9.609418282548477, "grad_norm": 0.6527032852172852, "learning_rate": 1.5615e-05, "loss": 0.0314, "step": 5208 }, { "epoch": 9.611265004616806, "grad_norm": 0.5314533114433289, "learning_rate": 1.5618e-05, "loss": 0.0261, "step": 5209 }, { "epoch": 9.613111726685133, "grad_norm": 0.47839510440826416, "learning_rate": 1.5621000000000002e-05, "loss": 0.0234, "step": 5210 }, { "epoch": 9.614958448753463, "grad_norm": 0.5687962174415588, "learning_rate": 1.5624e-05, "loss": 0.0387, "step": 5211 }, { "epoch": 9.616805170821792, "grad_norm": 0.3908262848854065, "learning_rate": 1.5627e-05, "loss": 0.0175, "step": 5212 }, { "epoch": 9.61865189289012, "grad_norm": 0.4974033534526825, "learning_rate": 1.563e-05, "loss": 0.0267, "step": 5213 }, { 
"epoch": 9.620498614958448, "grad_norm": 0.7582276463508606, "learning_rate": 1.5633e-05, "loss": 0.0294, "step": 5214 }, { "epoch": 9.622345337026777, "grad_norm": 0.5293557047843933, "learning_rate": 1.5636e-05, "loss": 0.0246, "step": 5215 }, { "epoch": 9.624192059095106, "grad_norm": 0.5568930506706238, "learning_rate": 1.5639e-05, "loss": 0.0615, "step": 5216 }, { "epoch": 9.626038781163436, "grad_norm": 0.46155261993408203, "learning_rate": 1.5642e-05, "loss": 0.0252, "step": 5217 }, { "epoch": 9.627885503231763, "grad_norm": 0.7104062438011169, "learning_rate": 1.5645e-05, "loss": 0.0299, "step": 5218 }, { "epoch": 9.629732225300092, "grad_norm": 0.4467885196208954, "learning_rate": 1.5648e-05, "loss": 0.028, "step": 5219 }, { "epoch": 9.631578947368421, "grad_norm": 1.1483324766159058, "learning_rate": 1.5651000000000003e-05, "loss": 0.0344, "step": 5220 }, { "epoch": 9.63342566943675, "grad_norm": 0.6257169246673584, "learning_rate": 1.5654000000000003e-05, "loss": 0.0225, "step": 5221 }, { "epoch": 9.635272391505078, "grad_norm": 0.7972967028617859, "learning_rate": 1.5657000000000003e-05, "loss": 0.0343, "step": 5222 }, { "epoch": 9.637119113573407, "grad_norm": 0.5291518568992615, "learning_rate": 1.5660000000000003e-05, "loss": 0.0166, "step": 5223 }, { "epoch": 9.638965835641736, "grad_norm": 0.8691847324371338, "learning_rate": 1.5663000000000002e-05, "loss": 0.0359, "step": 5224 }, { "epoch": 9.640812557710065, "grad_norm": 0.5594950914382935, "learning_rate": 1.5666e-05, "loss": 0.0162, "step": 5225 }, { "epoch": 9.642659279778393, "grad_norm": 0.5064205527305603, "learning_rate": 1.5669e-05, "loss": 0.025, "step": 5226 }, { "epoch": 9.644506001846722, "grad_norm": 0.5738297700881958, "learning_rate": 1.5672e-05, "loss": 0.019, "step": 5227 }, { "epoch": 9.64635272391505, "grad_norm": 0.6175197958946228, "learning_rate": 1.5674999999999998e-05, "loss": 0.0538, "step": 5228 }, { "epoch": 9.64819944598338, "grad_norm": 2.0463693141937256, 
"learning_rate": 1.5677999999999998e-05, "loss": 0.3066, "step": 5229 }, { "epoch": 9.65004616805171, "grad_norm": 0.8259283304214478, "learning_rate": 1.5681e-05, "loss": 0.2584, "step": 5230 }, { "epoch": 9.651892890120036, "grad_norm": 0.6895362138748169, "learning_rate": 1.5684e-05, "loss": 0.191, "step": 5231 }, { "epoch": 9.653739612188366, "grad_norm": 0.6375547051429749, "learning_rate": 1.5687e-05, "loss": 0.1583, "step": 5232 }, { "epoch": 9.655586334256695, "grad_norm": 0.8149611353874207, "learning_rate": 1.569e-05, "loss": 0.1589, "step": 5233 }, { "epoch": 9.657433056325024, "grad_norm": 1.1168636083602905, "learning_rate": 1.5693e-05, "loss": 0.1955, "step": 5234 }, { "epoch": 9.659279778393351, "grad_norm": 1.1383134126663208, "learning_rate": 1.5696e-05, "loss": 0.182, "step": 5235 }, { "epoch": 9.66112650046168, "grad_norm": 0.8388357162475586, "learning_rate": 1.5699e-05, "loss": 0.138, "step": 5236 }, { "epoch": 9.66297322253001, "grad_norm": 0.8659719824790955, "learning_rate": 1.5702e-05, "loss": 0.1609, "step": 5237 }, { "epoch": 9.664819944598339, "grad_norm": 0.7714173793792725, "learning_rate": 1.5705e-05, "loss": 0.1067, "step": 5238 }, { "epoch": 9.666666666666666, "grad_norm": 0.5207611918449402, "learning_rate": 1.5708e-05, "loss": 0.0698, "step": 5239 }, { "epoch": 9.668513388734995, "grad_norm": 0.6840134859085083, "learning_rate": 1.5711000000000003e-05, "loss": 0.1231, "step": 5240 }, { "epoch": 9.670360110803324, "grad_norm": 1.1387262344360352, "learning_rate": 1.5714000000000002e-05, "loss": 0.1607, "step": 5241 }, { "epoch": 9.672206832871654, "grad_norm": 1.182800531387329, "learning_rate": 1.5717000000000002e-05, "loss": 0.093, "step": 5242 }, { "epoch": 9.67405355493998, "grad_norm": 0.5864893198013306, "learning_rate": 1.5720000000000002e-05, "loss": 0.0594, "step": 5243 }, { "epoch": 9.67590027700831, "grad_norm": 0.48163750767707825, "learning_rate": 1.5723000000000002e-05, "loss": 0.0634, "step": 5244 }, { "epoch": 
9.67774699907664, "grad_norm": 0.5190600752830505, "learning_rate": 1.5726e-05, "loss": 0.0463, "step": 5245 }, { "epoch": 9.679593721144968, "grad_norm": 0.6937405467033386, "learning_rate": 1.5729e-05, "loss": 0.0225, "step": 5246 }, { "epoch": 9.681440443213296, "grad_norm": 0.33243173360824585, "learning_rate": 1.5732e-05, "loss": 0.0162, "step": 5247 }, { "epoch": 9.683287165281625, "grad_norm": 0.8670083284378052, "learning_rate": 1.5735e-05, "loss": 0.0254, "step": 5248 }, { "epoch": 9.685133887349954, "grad_norm": 0.6444321274757385, "learning_rate": 1.5737999999999997e-05, "loss": 0.027, "step": 5249 }, { "epoch": 9.686980609418283, "grad_norm": 0.6079081296920776, "learning_rate": 1.5741e-05, "loss": 0.048, "step": 5250 }, { "epoch": 9.68882733148661, "grad_norm": 0.6462955474853516, "learning_rate": 1.5744e-05, "loss": 0.0408, "step": 5251 }, { "epoch": 9.69067405355494, "grad_norm": 0.5033811330795288, "learning_rate": 1.5747e-05, "loss": 0.0309, "step": 5252 }, { "epoch": 9.692520775623269, "grad_norm": 0.43178778886795044, "learning_rate": 1.575e-05, "loss": 0.0221, "step": 5253 }, { "epoch": 9.694367497691598, "grad_norm": 0.7185611724853516, "learning_rate": 1.5753e-05, "loss": 0.0385, "step": 5254 }, { "epoch": 9.696214219759927, "grad_norm": 0.47433245182037354, "learning_rate": 1.5756e-05, "loss": 0.0174, "step": 5255 }, { "epoch": 9.698060941828254, "grad_norm": 0.3177076578140259, "learning_rate": 1.5759e-05, "loss": 0.0194, "step": 5256 }, { "epoch": 9.699907663896584, "grad_norm": 0.9755005836486816, "learning_rate": 1.5762e-05, "loss": 0.0293, "step": 5257 }, { "epoch": 9.701754385964913, "grad_norm": 0.418106347322464, "learning_rate": 1.5765e-05, "loss": 0.0269, "step": 5258 }, { "epoch": 9.703601108033242, "grad_norm": 0.6220974922180176, "learning_rate": 1.5768e-05, "loss": 0.0296, "step": 5259 }, { "epoch": 9.70544783010157, "grad_norm": 0.46836185455322266, "learning_rate": 1.5771e-05, "loss": 0.0562, "step": 5260 }, { "epoch": 
9.707294552169898, "grad_norm": 0.5895121097564697, "learning_rate": 1.5774000000000002e-05, "loss": 0.0205, "step": 5261 }, { "epoch": 9.709141274238227, "grad_norm": 0.7849803566932678, "learning_rate": 1.5777e-05, "loss": 0.0263, "step": 5262 }, { "epoch": 9.710987996306557, "grad_norm": 0.47644442319869995, "learning_rate": 1.578e-05, "loss": 0.0294, "step": 5263 }, { "epoch": 9.712834718374884, "grad_norm": 0.5167056322097778, "learning_rate": 1.5783e-05, "loss": 0.0344, "step": 5264 }, { "epoch": 9.714681440443213, "grad_norm": 0.6035995483398438, "learning_rate": 1.5786e-05, "loss": 0.0229, "step": 5265 }, { "epoch": 9.716528162511542, "grad_norm": 0.35058900713920593, "learning_rate": 1.5789e-05, "loss": 0.0231, "step": 5266 }, { "epoch": 9.718374884579871, "grad_norm": 0.4649684429168701, "learning_rate": 1.5792e-05, "loss": 0.0298, "step": 5267 }, { "epoch": 9.720221606648199, "grad_norm": 0.44932129979133606, "learning_rate": 1.5795e-05, "loss": 0.0292, "step": 5268 }, { "epoch": 9.722068328716528, "grad_norm": 0.5442031025886536, "learning_rate": 1.5798e-05, "loss": 0.0238, "step": 5269 }, { "epoch": 9.723915050784857, "grad_norm": 0.40109357237815857, "learning_rate": 1.5801e-05, "loss": 0.0245, "step": 5270 }, { "epoch": 9.725761772853186, "grad_norm": 0.7246041297912598, "learning_rate": 1.5804000000000003e-05, "loss": 0.0334, "step": 5271 }, { "epoch": 9.727608494921514, "grad_norm": 0.42517438530921936, "learning_rate": 1.5807000000000003e-05, "loss": 0.0216, "step": 5272 }, { "epoch": 9.729455216989843, "grad_norm": 0.7946247458457947, "learning_rate": 1.5810000000000003e-05, "loss": 0.0425, "step": 5273 }, { "epoch": 9.731301939058172, "grad_norm": 0.7544342875480652, "learning_rate": 1.5813e-05, "loss": 0.0392, "step": 5274 }, { "epoch": 9.733148661126501, "grad_norm": 0.5644515752792358, "learning_rate": 1.5816e-05, "loss": 0.0273, "step": 5275 }, { "epoch": 9.734995383194828, "grad_norm": 0.614993155002594, "learning_rate": 1.5819e-05, "loss": 
0.0538, "step": 5276 }, { "epoch": 9.736842105263158, "grad_norm": 1.1540549993515015, "learning_rate": 1.5822e-05, "loss": 0.0494, "step": 5277 }, { "epoch": 9.738688827331487, "grad_norm": 0.692342221736908, "learning_rate": 1.5825e-05, "loss": 0.0325, "step": 5278 }, { "epoch": 9.740535549399816, "grad_norm": 1.2595493793487549, "learning_rate": 1.5827999999999998e-05, "loss": 0.3012, "step": 5279 }, { "epoch": 9.742382271468145, "grad_norm": 0.8239694833755493, "learning_rate": 1.5830999999999998e-05, "loss": 0.239, "step": 5280 }, { "epoch": 9.744228993536472, "grad_norm": 0.7055354118347168, "learning_rate": 1.5834e-05, "loss": 0.2103, "step": 5281 }, { "epoch": 9.746075715604801, "grad_norm": 0.6928851008415222, "learning_rate": 1.5837e-05, "loss": 0.1638, "step": 5282 }, { "epoch": 9.74792243767313, "grad_norm": 1.6198242902755737, "learning_rate": 1.584e-05, "loss": 0.1653, "step": 5283 }, { "epoch": 9.749769159741458, "grad_norm": 1.1449764966964722, "learning_rate": 1.5843e-05, "loss": 0.2131, "step": 5284 }, { "epoch": 9.751615881809787, "grad_norm": 0.7790804505348206, "learning_rate": 1.5846e-05, "loss": 0.1631, "step": 5285 }, { "epoch": 9.753462603878116, "grad_norm": 0.7726112008094788, "learning_rate": 1.5849e-05, "loss": 0.1724, "step": 5286 }, { "epoch": 9.755309325946445, "grad_norm": 0.742899477481842, "learning_rate": 1.5852e-05, "loss": 0.1177, "step": 5287 }, { "epoch": 9.757156048014775, "grad_norm": 1.6110416650772095, "learning_rate": 1.5855e-05, "loss": 0.1097, "step": 5288 }, { "epoch": 9.759002770083102, "grad_norm": 0.6606073975563049, "learning_rate": 1.5858e-05, "loss": 0.0952, "step": 5289 }, { "epoch": 9.760849492151431, "grad_norm": 1.041293740272522, "learning_rate": 1.5861e-05, "loss": 0.0871, "step": 5290 }, { "epoch": 9.76269621421976, "grad_norm": 1.2442162036895752, "learning_rate": 1.5864000000000003e-05, "loss": 0.1605, "step": 5291 }, { "epoch": 9.76454293628809, "grad_norm": 0.8230966329574585, "learning_rate": 
1.5867000000000002e-05, "loss": 0.101, "step": 5292 }, { "epoch": 9.766389658356417, "grad_norm": 0.45017313957214355, "learning_rate": 1.5870000000000002e-05, "loss": 0.0534, "step": 5293 }, { "epoch": 9.768236380424746, "grad_norm": 0.5587928295135498, "learning_rate": 1.5873000000000002e-05, "loss": 0.0422, "step": 5294 }, { "epoch": 9.770083102493075, "grad_norm": 0.41939765214920044, "learning_rate": 1.5876000000000002e-05, "loss": 0.0211, "step": 5295 }, { "epoch": 9.771929824561404, "grad_norm": 1.1486107110977173, "learning_rate": 1.5879e-05, "loss": 0.0782, "step": 5296 }, { "epoch": 9.773776546629731, "grad_norm": 0.740656852722168, "learning_rate": 1.5882e-05, "loss": 0.0474, "step": 5297 }, { "epoch": 9.77562326869806, "grad_norm": 0.46551042795181274, "learning_rate": 1.5884999999999998e-05, "loss": 0.0435, "step": 5298 }, { "epoch": 9.77746999076639, "grad_norm": 0.5009204745292664, "learning_rate": 1.5887999999999998e-05, "loss": 0.0341, "step": 5299 }, { "epoch": 9.779316712834719, "grad_norm": 0.34932154417037964, "learning_rate": 1.5890999999999997e-05, "loss": 0.0348, "step": 5300 }, { "epoch": 9.781163434903046, "grad_norm": 0.5163803696632385, "learning_rate": 1.5894e-05, "loss": 0.0389, "step": 5301 }, { "epoch": 9.783010156971375, "grad_norm": 0.5265400409698486, "learning_rate": 1.5897e-05, "loss": 0.0258, "step": 5302 }, { "epoch": 9.784856879039705, "grad_norm": 0.6005827784538269, "learning_rate": 1.59e-05, "loss": 0.0322, "step": 5303 }, { "epoch": 9.786703601108034, "grad_norm": 0.4704049825668335, "learning_rate": 1.5903e-05, "loss": 0.0343, "step": 5304 }, { "epoch": 9.788550323176363, "grad_norm": 0.4133896827697754, "learning_rate": 1.5906e-05, "loss": 0.0246, "step": 5305 }, { "epoch": 9.79039704524469, "grad_norm": 0.7306255102157593, "learning_rate": 1.5909e-05, "loss": 0.0306, "step": 5306 }, { "epoch": 9.79224376731302, "grad_norm": 1.2488560676574707, "learning_rate": 1.5912e-05, "loss": 0.0696, "step": 5307 }, { "epoch": 
9.794090489381349, "grad_norm": 0.40144163370132446, "learning_rate": 1.5915e-05, "loss": 0.0272, "step": 5308 }, { "epoch": 9.795937211449676, "grad_norm": 0.4051397442817688, "learning_rate": 1.5918e-05, "loss": 0.0233, "step": 5309 }, { "epoch": 9.797783933518005, "grad_norm": 0.4358450472354889, "learning_rate": 1.5921e-05, "loss": 0.0322, "step": 5310 }, { "epoch": 9.799630655586334, "grad_norm": 0.5932910442352295, "learning_rate": 1.5924000000000002e-05, "loss": 0.0334, "step": 5311 }, { "epoch": 9.801477377654663, "grad_norm": 0.4162185490131378, "learning_rate": 1.5927000000000002e-05, "loss": 0.0206, "step": 5312 }, { "epoch": 9.803324099722992, "grad_norm": 0.5718238353729248, "learning_rate": 1.593e-05, "loss": 0.024, "step": 5313 }, { "epoch": 9.80517082179132, "grad_norm": 0.5045569539070129, "learning_rate": 1.5933e-05, "loss": 0.0296, "step": 5314 }, { "epoch": 9.807017543859649, "grad_norm": 0.4584674537181854, "learning_rate": 1.5936e-05, "loss": 0.0232, "step": 5315 }, { "epoch": 9.808864265927978, "grad_norm": 0.4227735698223114, "learning_rate": 1.5939e-05, "loss": 0.0343, "step": 5316 }, { "epoch": 9.810710987996307, "grad_norm": 0.8416896462440491, "learning_rate": 1.5942e-05, "loss": 0.0274, "step": 5317 }, { "epoch": 9.812557710064635, "grad_norm": 0.551142156124115, "learning_rate": 1.5945e-05, "loss": 0.0344, "step": 5318 }, { "epoch": 9.814404432132964, "grad_norm": 0.9386904239654541, "learning_rate": 1.5948e-05, "loss": 0.0517, "step": 5319 }, { "epoch": 9.816251154201293, "grad_norm": 0.8260375261306763, "learning_rate": 1.5951e-05, "loss": 0.0394, "step": 5320 }, { "epoch": 9.818097876269622, "grad_norm": 0.6223652362823486, "learning_rate": 1.5954000000000003e-05, "loss": 0.0328, "step": 5321 }, { "epoch": 9.81994459833795, "grad_norm": 1.2013148069381714, "learning_rate": 1.5957000000000003e-05, "loss": 0.0415, "step": 5322 }, { "epoch": 9.821791320406279, "grad_norm": 0.4488355815410614, "learning_rate": 1.596e-05, "loss": 0.0281, 
"step": 5323 }, { "epoch": 9.823638042474608, "grad_norm": 0.8077351450920105, "learning_rate": 1.5963e-05, "loss": 0.034, "step": 5324 }, { "epoch": 9.825484764542937, "grad_norm": 0.6670114398002625, "learning_rate": 1.5966e-05, "loss": 0.0314, "step": 5325 }, { "epoch": 9.827331486611264, "grad_norm": 0.5995292663574219, "learning_rate": 1.5969e-05, "loss": 0.0353, "step": 5326 }, { "epoch": 9.829178208679593, "grad_norm": 0.5224243402481079, "learning_rate": 1.5972e-05, "loss": 0.0346, "step": 5327 }, { "epoch": 9.831024930747922, "grad_norm": 1.5100773572921753, "learning_rate": 1.5975e-05, "loss": 0.0494, "step": 5328 }, { "epoch": 9.832871652816252, "grad_norm": 1.619434118270874, "learning_rate": 1.5978e-05, "loss": 0.2553, "step": 5329 }, { "epoch": 9.83471837488458, "grad_norm": 1.285167932510376, "learning_rate": 1.5980999999999998e-05, "loss": 0.2833, "step": 5330 }, { "epoch": 9.836565096952908, "grad_norm": 3.9550986289978027, "learning_rate": 1.5984e-05, "loss": 0.2325, "step": 5331 }, { "epoch": 9.838411819021237, "grad_norm": 0.7910865545272827, "learning_rate": 1.5987e-05, "loss": 0.2184, "step": 5332 }, { "epoch": 9.840258541089566, "grad_norm": 0.6185511350631714, "learning_rate": 1.599e-05, "loss": 0.1437, "step": 5333 }, { "epoch": 9.842105263157894, "grad_norm": 1.0045210123062134, "learning_rate": 1.5993e-05, "loss": 0.2016, "step": 5334 }, { "epoch": 9.843951985226223, "grad_norm": 0.8364124298095703, "learning_rate": 1.5996e-05, "loss": 0.1727, "step": 5335 }, { "epoch": 9.845798707294552, "grad_norm": 0.6798272728919983, "learning_rate": 1.5999e-05, "loss": 0.1071, "step": 5336 }, { "epoch": 9.847645429362881, "grad_norm": 1.0942049026489258, "learning_rate": 1.6002e-05, "loss": 0.1737, "step": 5337 }, { "epoch": 9.84949215143121, "grad_norm": 0.6949169635772705, "learning_rate": 1.6005e-05, "loss": 0.1016, "step": 5338 }, { "epoch": 9.851338873499538, "grad_norm": 1.0689623355865479, "learning_rate": 1.6008e-05, "loss": 0.1219, "step": 
5339 }, { "epoch": 9.853185595567867, "grad_norm": 0.654719352722168, "learning_rate": 1.6011e-05, "loss": 0.0776, "step": 5340 }, { "epoch": 9.855032317636196, "grad_norm": 0.8142164945602417, "learning_rate": 1.6014000000000003e-05, "loss": 0.0958, "step": 5341 }, { "epoch": 9.856879039704525, "grad_norm": 0.4266529083251953, "learning_rate": 1.6017000000000003e-05, "loss": 0.042, "step": 5342 }, { "epoch": 9.858725761772853, "grad_norm": 0.3962545692920685, "learning_rate": 1.6020000000000002e-05, "loss": 0.0574, "step": 5343 }, { "epoch": 9.860572483841182, "grad_norm": 0.7206045985221863, "learning_rate": 1.6023000000000002e-05, "loss": 0.0626, "step": 5344 }, { "epoch": 9.86241920590951, "grad_norm": 0.48156070709228516, "learning_rate": 1.6026000000000002e-05, "loss": 0.0341, "step": 5345 }, { "epoch": 9.86426592797784, "grad_norm": 0.4663195312023163, "learning_rate": 1.6029000000000002e-05, "loss": 0.0355, "step": 5346 }, { "epoch": 9.866112650046167, "grad_norm": 0.30054986476898193, "learning_rate": 1.6032e-05, "loss": 0.026, "step": 5347 }, { "epoch": 9.867959372114496, "grad_norm": 0.8619183897972107, "learning_rate": 1.6034999999999998e-05, "loss": 0.1128, "step": 5348 }, { "epoch": 9.869806094182826, "grad_norm": 0.4234899580478668, "learning_rate": 1.6037999999999998e-05, "loss": 0.0335, "step": 5349 }, { "epoch": 9.871652816251155, "grad_norm": 0.6339716911315918, "learning_rate": 1.6040999999999998e-05, "loss": 0.0373, "step": 5350 }, { "epoch": 9.873499538319482, "grad_norm": 0.31937074661254883, "learning_rate": 1.6044e-05, "loss": 0.0212, "step": 5351 }, { "epoch": 9.875346260387811, "grad_norm": 0.38986673951148987, "learning_rate": 1.6047e-05, "loss": 0.022, "step": 5352 }, { "epoch": 9.87719298245614, "grad_norm": 0.32344773411750793, "learning_rate": 1.605e-05, "loss": 0.025, "step": 5353 }, { "epoch": 9.87903970452447, "grad_norm": 0.8871376514434814, "learning_rate": 1.6053e-05, "loss": 0.0318, "step": 5354 }, { "epoch": 
9.880886426592799, "grad_norm": 0.5399925112724304, "learning_rate": 1.6056e-05, "loss": 0.0381, "step": 5355 }, { "epoch": 9.882733148661126, "grad_norm": 0.47385239601135254, "learning_rate": 1.6059e-05, "loss": 0.0299, "step": 5356 }, { "epoch": 9.884579870729455, "grad_norm": 0.38570722937583923, "learning_rate": 1.6062e-05, "loss": 0.0275, "step": 5357 }, { "epoch": 9.886426592797784, "grad_norm": 0.4128206670284271, "learning_rate": 1.6065e-05, "loss": 0.0209, "step": 5358 }, { "epoch": 9.888273314866112, "grad_norm": 0.5899372100830078, "learning_rate": 1.6068e-05, "loss": 0.0328, "step": 5359 }, { "epoch": 9.89012003693444, "grad_norm": 0.6390036940574646, "learning_rate": 1.6071e-05, "loss": 0.0197, "step": 5360 }, { "epoch": 9.89196675900277, "grad_norm": 0.531987726688385, "learning_rate": 1.6074000000000002e-05, "loss": 0.0251, "step": 5361 }, { "epoch": 9.8938134810711, "grad_norm": 0.4834712743759155, "learning_rate": 1.6077000000000002e-05, "loss": 0.0223, "step": 5362 }, { "epoch": 9.895660203139428, "grad_norm": 0.43583738803863525, "learning_rate": 1.6080000000000002e-05, "loss": 0.0277, "step": 5363 }, { "epoch": 9.897506925207756, "grad_norm": 0.49375590682029724, "learning_rate": 1.6083000000000002e-05, "loss": 0.0253, "step": 5364 }, { "epoch": 9.899353647276085, "grad_norm": 0.6179859042167664, "learning_rate": 1.6086e-05, "loss": 0.0212, "step": 5365 }, { "epoch": 9.901200369344414, "grad_norm": 0.5361495614051819, "learning_rate": 1.6089e-05, "loss": 0.0349, "step": 5366 }, { "epoch": 9.903047091412743, "grad_norm": 1.1141771078109741, "learning_rate": 1.6092e-05, "loss": 0.0301, "step": 5367 }, { "epoch": 9.90489381348107, "grad_norm": 0.7264317870140076, "learning_rate": 1.6095e-05, "loss": 0.0301, "step": 5368 }, { "epoch": 9.9067405355494, "grad_norm": 0.6218966245651245, "learning_rate": 1.6098e-05, "loss": 0.0379, "step": 5369 }, { "epoch": 9.908587257617729, "grad_norm": 0.8654443621635437, "learning_rate": 1.6101e-05, "loss": 
0.0406, "step": 5370 }, { "epoch": 9.910433979686058, "grad_norm": 0.6884783506393433, "learning_rate": 1.6104000000000004e-05, "loss": 0.0286, "step": 5371 }, { "epoch": 9.912280701754385, "grad_norm": 0.5119009017944336, "learning_rate": 1.6107e-05, "loss": 0.0299, "step": 5372 }, { "epoch": 9.914127423822714, "grad_norm": 0.6698522567749023, "learning_rate": 1.611e-05, "loss": 0.0292, "step": 5373 }, { "epoch": 9.915974145891044, "grad_norm": 0.7594621181488037, "learning_rate": 1.6113e-05, "loss": 0.0333, "step": 5374 }, { "epoch": 9.917820867959373, "grad_norm": 0.4515618085861206, "learning_rate": 1.6116e-05, "loss": 0.0234, "step": 5375 }, { "epoch": 9.9196675900277, "grad_norm": 0.6473972797393799, "learning_rate": 1.6119e-05, "loss": 0.0345, "step": 5376 }, { "epoch": 9.92151431209603, "grad_norm": 0.7409936189651489, "learning_rate": 1.6122e-05, "loss": 0.0439, "step": 5377 }, { "epoch": 9.923361034164358, "grad_norm": 0.7425601482391357, "learning_rate": 1.6125e-05, "loss": 0.0607, "step": 5378 }, { "epoch": 9.925207756232687, "grad_norm": 0.9278039336204529, "learning_rate": 1.6128e-05, "loss": 0.2801, "step": 5379 }, { "epoch": 9.927054478301017, "grad_norm": 0.7289472818374634, "learning_rate": 1.6131e-05, "loss": 0.1843, "step": 5380 }, { "epoch": 9.928901200369344, "grad_norm": 1.1333955526351929, "learning_rate": 1.6134e-05, "loss": 0.1941, "step": 5381 }, { "epoch": 9.930747922437673, "grad_norm": 0.8633908629417419, "learning_rate": 1.6137e-05, "loss": 0.2418, "step": 5382 }, { "epoch": 9.932594644506002, "grad_norm": 0.829034686088562, "learning_rate": 1.614e-05, "loss": 0.1373, "step": 5383 }, { "epoch": 9.93444136657433, "grad_norm": 0.5944445729255676, "learning_rate": 1.6143e-05, "loss": 0.1451, "step": 5384 }, { "epoch": 9.936288088642659, "grad_norm": 0.7495405673980713, "learning_rate": 1.6146e-05, "loss": 0.1112, "step": 5385 }, { "epoch": 9.938134810710988, "grad_norm": 0.6639820337295532, "learning_rate": 1.6149e-05, "loss": 0.1407, 
"step": 5386 }, { "epoch": 9.939981532779317, "grad_norm": 0.7890506982803345, "learning_rate": 1.6152e-05, "loss": 0.1097, "step": 5387 }, { "epoch": 9.941828254847646, "grad_norm": 1.096908450126648, "learning_rate": 1.6155e-05, "loss": 0.0857, "step": 5388 }, { "epoch": 9.943674976915974, "grad_norm": 1.0947462320327759, "learning_rate": 1.6158e-05, "loss": 0.1497, "step": 5389 }, { "epoch": 9.945521698984303, "grad_norm": 0.8100894093513489, "learning_rate": 1.6161e-05, "loss": 0.0883, "step": 5390 }, { "epoch": 9.947368421052632, "grad_norm": 0.5456356406211853, "learning_rate": 1.6164e-05, "loss": 0.0527, "step": 5391 }, { "epoch": 9.949215143120961, "grad_norm": 0.5579842329025269, "learning_rate": 1.6167000000000003e-05, "loss": 0.0433, "step": 5392 }, { "epoch": 9.951061865189288, "grad_norm": 0.6612244248390198, "learning_rate": 1.6170000000000003e-05, "loss": 0.0377, "step": 5393 }, { "epoch": 9.952908587257618, "grad_norm": 0.4671032726764679, "learning_rate": 1.6173000000000003e-05, "loss": 0.0245, "step": 5394 }, { "epoch": 9.954755309325947, "grad_norm": 0.4639342725276947, "learning_rate": 1.6176000000000002e-05, "loss": 0.0338, "step": 5395 }, { "epoch": 9.956602031394276, "grad_norm": 0.38254842162132263, "learning_rate": 1.6179000000000002e-05, "loss": 0.0173, "step": 5396 }, { "epoch": 9.958448753462603, "grad_norm": 0.34761661291122437, "learning_rate": 1.6182e-05, "loss": 0.0267, "step": 5397 }, { "epoch": 9.960295475530932, "grad_norm": 0.5771934390068054, "learning_rate": 1.6185e-05, "loss": 0.0379, "step": 5398 }, { "epoch": 9.962142197599261, "grad_norm": 0.7256296873092651, "learning_rate": 1.6187999999999998e-05, "loss": 0.0503, "step": 5399 }, { "epoch": 9.96398891966759, "grad_norm": 0.4355110824108124, "learning_rate": 1.6190999999999998e-05, "loss": 0.0237, "step": 5400 }, { "epoch": 9.965835641735918, "grad_norm": 0.6564544439315796, "learning_rate": 1.6193999999999998e-05, "loss": 0.0701, "step": 5401 }, { "epoch": 
9.967682363804247, "grad_norm": 0.4925614595413208, "learning_rate": 1.6197e-05, "loss": 0.0291, "step": 5402 }, { "epoch": 9.969529085872576, "grad_norm": 0.6966110467910767, "learning_rate": 1.62e-05, "loss": 0.0253, "step": 5403 }, { "epoch": 9.971375807940905, "grad_norm": 0.7741514444351196, "learning_rate": 1.6203e-05, "loss": 0.0212, "step": 5404 }, { "epoch": 9.973222530009235, "grad_norm": 0.5057776570320129, "learning_rate": 1.6206e-05, "loss": 0.0318, "step": 5405 }, { "epoch": 9.975069252077562, "grad_norm": 0.621398389339447, "learning_rate": 1.6209e-05, "loss": 0.0308, "step": 5406 }, { "epoch": 9.976915974145891, "grad_norm": 0.4574577510356903, "learning_rate": 1.6212e-05, "loss": 0.0226, "step": 5407 }, { "epoch": 9.97876269621422, "grad_norm": 0.5730082988739014, "learning_rate": 1.6215e-05, "loss": 0.0198, "step": 5408 }, { "epoch": 9.980609418282548, "grad_norm": 0.752869188785553, "learning_rate": 1.6218e-05, "loss": 0.0228, "step": 5409 }, { "epoch": 9.982456140350877, "grad_norm": 0.513892650604248, "learning_rate": 1.6221e-05, "loss": 0.0255, "step": 5410 }, { "epoch": 9.984302862419206, "grad_norm": 1.2083364725112915, "learning_rate": 1.6224e-05, "loss": 0.0294, "step": 5411 }, { "epoch": 9.986149584487535, "grad_norm": 0.5639918446540833, "learning_rate": 1.6227000000000002e-05, "loss": 0.0238, "step": 5412 }, { "epoch": 9.987996306555864, "grad_norm": 4.152475833892822, "learning_rate": 1.6230000000000002e-05, "loss": 0.0333, "step": 5413 }, { "epoch": 9.989843028624191, "grad_norm": 0.846754252910614, "learning_rate": 1.6233000000000002e-05, "loss": 0.0329, "step": 5414 }, { "epoch": 9.99168975069252, "grad_norm": 0.6781851053237915, "learning_rate": 1.6236000000000002e-05, "loss": 0.0388, "step": 5415 }, { "epoch": 9.99353647276085, "grad_norm": 0.8813036680221558, "learning_rate": 1.6239e-05, "loss": 0.0421, "step": 5416 }, { "epoch": 9.995383194829179, "grad_norm": 0.561560332775116, "learning_rate": 1.6242e-05, "loss": 0.0227, 
"step": 5417 }, { "epoch": 9.997229916897506, "grad_norm": 0.7628563642501831, "learning_rate": 1.6245e-05, "loss": 0.0295, "step": 5418 }, { "epoch": 9.999076638965835, "grad_norm": 0.5719330906867981, "learning_rate": 1.6248e-05, "loss": 0.0157, "step": 5419 }, { "epoch": 10.0, "grad_norm": 0.5679962635040283, "learning_rate": 1.6251e-05, "loss": 0.0167, "step": 5420 }, { "epoch": 10.00184672206833, "grad_norm": 1.5406891107559204, "learning_rate": 1.6253999999999997e-05, "loss": 0.3427, "step": 5421 }, { "epoch": 10.003693444136658, "grad_norm": 0.8477674722671509, "learning_rate": 1.6257e-05, "loss": 0.2991, "step": 5422 }, { "epoch": 10.005540166204986, "grad_norm": 1.0089524984359741, "learning_rate": 1.626e-05, "loss": 0.2128, "step": 5423 }, { "epoch": 10.007386888273315, "grad_norm": 1.0328800678253174, "learning_rate": 1.6263e-05, "loss": 0.1977, "step": 5424 }, { "epoch": 10.009233610341644, "grad_norm": 0.9635773301124573, "learning_rate": 1.6266e-05, "loss": 0.2169, "step": 5425 }, { "epoch": 10.011080332409973, "grad_norm": 1.2987926006317139, "learning_rate": 1.6269e-05, "loss": 0.1697, "step": 5426 }, { "epoch": 10.0129270544783, "grad_norm": 0.6195570826530457, "learning_rate": 1.6272e-05, "loss": 0.1087, "step": 5427 }, { "epoch": 10.01477377654663, "grad_norm": 0.6779178977012634, "learning_rate": 1.6275e-05, "loss": 0.1107, "step": 5428 }, { "epoch": 10.016620498614959, "grad_norm": 0.6723268628120422, "learning_rate": 1.6278e-05, "loss": 0.1222, "step": 5429 }, { "epoch": 10.018467220683288, "grad_norm": 0.6540348529815674, "learning_rate": 1.6281e-05, "loss": 0.1055, "step": 5430 }, { "epoch": 10.020313942751615, "grad_norm": 0.764249324798584, "learning_rate": 1.6284e-05, "loss": 0.1055, "step": 5431 }, { "epoch": 10.022160664819944, "grad_norm": 0.5885652899742126, "learning_rate": 1.6287000000000002e-05, "loss": 0.0961, "step": 5432 }, { "epoch": 10.024007386888274, "grad_norm": 1.157502293586731, "learning_rate": 1.629e-05, "loss": 0.1551, 
"step": 5433 }, { "epoch": 10.025854108956603, "grad_norm": 0.5454378128051758, "learning_rate": 1.6293e-05, "loss": 0.0723, "step": 5434 }, { "epoch": 10.02770083102493, "grad_norm": 0.4833084046840668, "learning_rate": 1.6296e-05, "loss": 0.0472, "step": 5435 }, { "epoch": 10.02954755309326, "grad_norm": 0.7118651866912842, "learning_rate": 1.6299e-05, "loss": 0.0354, "step": 5436 }, { "epoch": 10.031394275161588, "grad_norm": 0.5361140370368958, "learning_rate": 1.6302e-05, "loss": 0.0412, "step": 5437 }, { "epoch": 10.033240997229917, "grad_norm": 0.43500009179115295, "learning_rate": 1.6305e-05, "loss": 0.0236, "step": 5438 }, { "epoch": 10.035087719298245, "grad_norm": 0.989443302154541, "learning_rate": 1.6308e-05, "loss": 0.0288, "step": 5439 }, { "epoch": 10.036934441366574, "grad_norm": 0.3764720857143402, "learning_rate": 1.6311e-05, "loss": 0.0343, "step": 5440 }, { "epoch": 10.038781163434903, "grad_norm": 0.3913370966911316, "learning_rate": 1.6314e-05, "loss": 0.019, "step": 5441 }, { "epoch": 10.040627885503232, "grad_norm": 1.057128667831421, "learning_rate": 1.6317000000000003e-05, "loss": 0.0333, "step": 5442 }, { "epoch": 10.04247460757156, "grad_norm": 0.6400501132011414, "learning_rate": 1.6320000000000003e-05, "loss": 0.0311, "step": 5443 }, { "epoch": 10.044321329639889, "grad_norm": 0.4104997515678406, "learning_rate": 1.6323000000000003e-05, "loss": 0.0223, "step": 5444 }, { "epoch": 10.046168051708218, "grad_norm": 0.33504095673561096, "learning_rate": 1.6326000000000003e-05, "loss": 0.0284, "step": 5445 }, { "epoch": 10.048014773776547, "grad_norm": 0.4359169602394104, "learning_rate": 1.6329e-05, "loss": 0.0251, "step": 5446 }, { "epoch": 10.049861495844876, "grad_norm": 0.46953126788139343, "learning_rate": 1.6332e-05, "loss": 0.0294, "step": 5447 }, { "epoch": 10.051708217913204, "grad_norm": 0.7887614369392395, "learning_rate": 1.6335e-05, "loss": 0.0274, "step": 5448 }, { "epoch": 10.053554939981533, "grad_norm": 0.3279338777065277, 
"learning_rate": 1.6338e-05, "loss": 0.022, "step": 5449 }, { "epoch": 10.055401662049862, "grad_norm": 0.5057511329650879, "learning_rate": 1.6340999999999998e-05, "loss": 0.0316, "step": 5450 }, { "epoch": 10.057248384118191, "grad_norm": 0.5215173363685608, "learning_rate": 1.6343999999999998e-05, "loss": 0.019, "step": 5451 }, { "epoch": 10.059095106186518, "grad_norm": 0.42165639996528625, "learning_rate": 1.6347e-05, "loss": 0.019, "step": 5452 }, { "epoch": 10.060941828254848, "grad_norm": 0.33034011721611023, "learning_rate": 1.635e-05, "loss": 0.0165, "step": 5453 }, { "epoch": 10.062788550323177, "grad_norm": 0.45684513449668884, "learning_rate": 1.6353e-05, "loss": 0.0194, "step": 5454 }, { "epoch": 10.064635272391506, "grad_norm": 0.5774962306022644, "learning_rate": 1.6356e-05, "loss": 0.0394, "step": 5455 }, { "epoch": 10.066481994459833, "grad_norm": 0.4379504323005676, "learning_rate": 1.6359e-05, "loss": 0.0187, "step": 5456 }, { "epoch": 10.068328716528162, "grad_norm": 0.7650175094604492, "learning_rate": 1.6362e-05, "loss": 0.0215, "step": 5457 }, { "epoch": 10.070175438596491, "grad_norm": 0.5422001481056213, "learning_rate": 1.6365e-05, "loss": 0.0536, "step": 5458 }, { "epoch": 10.07202216066482, "grad_norm": 0.333640456199646, "learning_rate": 1.6368e-05, "loss": 0.0154, "step": 5459 }, { "epoch": 10.073868882733148, "grad_norm": 1.1780778169631958, "learning_rate": 1.6371e-05, "loss": 0.0174, "step": 5460 }, { "epoch": 10.075715604801477, "grad_norm": 0.7993797063827515, "learning_rate": 1.6374e-05, "loss": 0.0287, "step": 5461 }, { "epoch": 10.077562326869806, "grad_norm": 0.7349343299865723, "learning_rate": 1.6377000000000003e-05, "loss": 0.0304, "step": 5462 }, { "epoch": 10.079409048938135, "grad_norm": 0.8012458086013794, "learning_rate": 1.6380000000000002e-05, "loss": 0.0229, "step": 5463 }, { "epoch": 10.081255771006463, "grad_norm": 0.5809488892555237, "learning_rate": 1.6383000000000002e-05, "loss": 0.026, "step": 5464 }, { 
"epoch": 10.083102493074792, "grad_norm": 0.6947927474975586, "learning_rate": 1.6386000000000002e-05, "loss": 0.0334, "step": 5465 }, { "epoch": 10.084949215143121, "grad_norm": 1.0784257650375366, "learning_rate": 1.6389000000000002e-05, "loss": 0.032, "step": 5466 }, { "epoch": 10.08679593721145, "grad_norm": 0.4056265950202942, "learning_rate": 1.6392e-05, "loss": 0.017, "step": 5467 }, { "epoch": 10.088642659279778, "grad_norm": 0.5124925374984741, "learning_rate": 1.6395e-05, "loss": 0.0279, "step": 5468 }, { "epoch": 10.090489381348107, "grad_norm": 1.012369990348816, "learning_rate": 1.6398e-05, "loss": 0.0267, "step": 5469 }, { "epoch": 10.092336103416436, "grad_norm": 0.6609354615211487, "learning_rate": 1.6400999999999998e-05, "loss": 0.0279, "step": 5470 }, { "epoch": 10.094182825484765, "grad_norm": 1.0703595876693726, "learning_rate": 1.6403999999999997e-05, "loss": 0.2938, "step": 5471 }, { "epoch": 10.096029547553094, "grad_norm": 0.7762547731399536, "learning_rate": 1.6407e-05, "loss": 0.1815, "step": 5472 }, { "epoch": 10.097876269621421, "grad_norm": 0.7497743368148804, "learning_rate": 1.641e-05, "loss": 0.1767, "step": 5473 }, { "epoch": 10.09972299168975, "grad_norm": 0.7290788888931274, "learning_rate": 1.6413e-05, "loss": 0.1451, "step": 5474 }, { "epoch": 10.10156971375808, "grad_norm": 0.6357746124267578, "learning_rate": 1.6416e-05, "loss": 0.141, "step": 5475 }, { "epoch": 10.103416435826409, "grad_norm": 0.7867652773857117, "learning_rate": 1.6419e-05, "loss": 0.1779, "step": 5476 }, { "epoch": 10.105263157894736, "grad_norm": 0.5439184308052063, "learning_rate": 1.6422e-05, "loss": 0.1147, "step": 5477 }, { "epoch": 10.107109879963065, "grad_norm": 0.6986681818962097, "learning_rate": 1.6425e-05, "loss": 0.174, "step": 5478 }, { "epoch": 10.108956602031395, "grad_norm": 0.5804990530014038, "learning_rate": 1.6428e-05, "loss": 0.0873, "step": 5479 }, { "epoch": 10.110803324099724, "grad_norm": 1.0001838207244873, "learning_rate": 
1.6431e-05, "loss": 0.1138, "step": 5480 }, { "epoch": 10.112650046168051, "grad_norm": 0.825629711151123, "learning_rate": 1.6434e-05, "loss": 0.0916, "step": 5481 }, { "epoch": 10.11449676823638, "grad_norm": 0.7747324705123901, "learning_rate": 1.6437000000000002e-05, "loss": 0.0803, "step": 5482 }, { "epoch": 10.11634349030471, "grad_norm": 0.9804370999336243, "learning_rate": 1.6440000000000002e-05, "loss": 0.0415, "step": 5483 }, { "epoch": 10.118190212373039, "grad_norm": 0.7696126699447632, "learning_rate": 1.6443e-05, "loss": 0.0668, "step": 5484 }, { "epoch": 10.120036934441366, "grad_norm": 0.39905205368995667, "learning_rate": 1.6446e-05, "loss": 0.0409, "step": 5485 }, { "epoch": 10.121883656509695, "grad_norm": 0.40738794207572937, "learning_rate": 1.6449e-05, "loss": 0.0553, "step": 5486 }, { "epoch": 10.123730378578024, "grad_norm": 0.3792731761932373, "learning_rate": 1.6452e-05, "loss": 0.0366, "step": 5487 }, { "epoch": 10.125577100646353, "grad_norm": 0.538812518119812, "learning_rate": 1.6455e-05, "loss": 0.0338, "step": 5488 }, { "epoch": 10.12742382271468, "grad_norm": 0.6880744695663452, "learning_rate": 1.6458e-05, "loss": 0.0274, "step": 5489 }, { "epoch": 10.12927054478301, "grad_norm": 0.2780337333679199, "learning_rate": 1.6461e-05, "loss": 0.0161, "step": 5490 }, { "epoch": 10.131117266851339, "grad_norm": 0.5699805021286011, "learning_rate": 1.6464e-05, "loss": 0.02, "step": 5491 }, { "epoch": 10.132963988919668, "grad_norm": 0.7288239598274231, "learning_rate": 1.6467000000000003e-05, "loss": 0.0529, "step": 5492 }, { "epoch": 10.134810710987995, "grad_norm": 0.5717664361000061, "learning_rate": 1.6470000000000003e-05, "loss": 0.0315, "step": 5493 }, { "epoch": 10.136657433056325, "grad_norm": 0.36545732617378235, "learning_rate": 1.6473000000000003e-05, "loss": 0.0219, "step": 5494 }, { "epoch": 10.138504155124654, "grad_norm": 0.3427481949329376, "learning_rate": 1.6476e-05, "loss": 0.0238, "step": 5495 }, { "epoch": 
10.140350877192983, "grad_norm": 0.461640328168869, "learning_rate": 1.6479e-05, "loss": 0.0238, "step": 5496 }, { "epoch": 10.142197599261312, "grad_norm": 0.5584130883216858, "learning_rate": 1.6482e-05, "loss": 0.0276, "step": 5497 }, { "epoch": 10.14404432132964, "grad_norm": 0.376208633184433, "learning_rate": 1.6485e-05, "loss": 0.0187, "step": 5498 }, { "epoch": 10.145891043397969, "grad_norm": 0.3295646607875824, "learning_rate": 1.6488e-05, "loss": 0.023, "step": 5499 }, { "epoch": 10.147737765466298, "grad_norm": 0.40510445833206177, "learning_rate": 1.6491e-05, "loss": 0.0235, "step": 5500 }, { "epoch": 10.149584487534627, "grad_norm": 0.594293475151062, "learning_rate": 1.6493999999999998e-05, "loss": 0.0188, "step": 5501 }, { "epoch": 10.151431209602954, "grad_norm": 1.6221593618392944, "learning_rate": 1.6497e-05, "loss": 0.0352, "step": 5502 }, { "epoch": 10.153277931671283, "grad_norm": 0.4105490446090698, "learning_rate": 1.65e-05, "loss": 0.0213, "step": 5503 }, { "epoch": 10.155124653739612, "grad_norm": 0.5947759747505188, "learning_rate": 1.6503e-05, "loss": 0.0268, "step": 5504 }, { "epoch": 10.156971375807942, "grad_norm": 0.3585164546966553, "learning_rate": 1.6506e-05, "loss": 0.0177, "step": 5505 }, { "epoch": 10.158818097876269, "grad_norm": 0.435248464345932, "learning_rate": 1.6509e-05, "loss": 0.0262, "step": 5506 }, { "epoch": 10.160664819944598, "grad_norm": 0.31813377141952515, "learning_rate": 1.6512e-05, "loss": 0.0187, "step": 5507 }, { "epoch": 10.162511542012927, "grad_norm": 0.569756031036377, "learning_rate": 1.6515e-05, "loss": 0.018, "step": 5508 }, { "epoch": 10.164358264081256, "grad_norm": 0.31594976782798767, "learning_rate": 1.6518e-05, "loss": 0.013, "step": 5509 }, { "epoch": 10.166204986149584, "grad_norm": 0.3902089297771454, "learning_rate": 1.6521e-05, "loss": 0.0197, "step": 5510 }, { "epoch": 10.168051708217913, "grad_norm": 0.5651479363441467, "learning_rate": 1.6524e-05, "loss": 0.0185, "step": 5511 }, { 
"epoch": 10.169898430286242, "grad_norm": 0.45485809445381165, "learning_rate": 1.6527e-05, "loss": 0.0256, "step": 5512 }, { "epoch": 10.171745152354571, "grad_norm": 1.6518807411193848, "learning_rate": 1.6530000000000003e-05, "loss": 0.0612, "step": 5513 }, { "epoch": 10.173591874422899, "grad_norm": 0.7069046497344971, "learning_rate": 1.6533000000000002e-05, "loss": 0.0348, "step": 5514 }, { "epoch": 10.175438596491228, "grad_norm": 0.6123530864715576, "learning_rate": 1.6536000000000002e-05, "loss": 0.0275, "step": 5515 }, { "epoch": 10.177285318559557, "grad_norm": 0.7783622145652771, "learning_rate": 1.6539000000000002e-05, "loss": 0.0379, "step": 5516 }, { "epoch": 10.179132040627886, "grad_norm": 0.5361220836639404, "learning_rate": 1.6542000000000002e-05, "loss": 0.0175, "step": 5517 }, { "epoch": 10.180978762696213, "grad_norm": 0.7196072340011597, "learning_rate": 1.6545e-05, "loss": 0.0519, "step": 5518 }, { "epoch": 10.182825484764543, "grad_norm": 0.8551514744758606, "learning_rate": 1.6548e-05, "loss": 0.0255, "step": 5519 }, { "epoch": 10.184672206832872, "grad_norm": 0.9311538338661194, "learning_rate": 1.6550999999999998e-05, "loss": 0.025, "step": 5520 }, { "epoch": 10.1865189289012, "grad_norm": 0.8415346145629883, "learning_rate": 1.6553999999999998e-05, "loss": 0.2651, "step": 5521 }, { "epoch": 10.18836565096953, "grad_norm": 0.9645931720733643, "learning_rate": 1.6556999999999998e-05, "loss": 0.2365, "step": 5522 }, { "epoch": 10.190212373037857, "grad_norm": 0.8362430930137634, "learning_rate": 1.656e-05, "loss": 0.2345, "step": 5523 }, { "epoch": 10.192059095106186, "grad_norm": 0.7902693152427673, "learning_rate": 1.6563e-05, "loss": 0.2174, "step": 5524 }, { "epoch": 10.193905817174516, "grad_norm": 0.5826321840286255, "learning_rate": 1.6566e-05, "loss": 0.1437, "step": 5525 }, { "epoch": 10.195752539242845, "grad_norm": 1.1524338722229004, "learning_rate": 1.6569e-05, "loss": 0.1946, "step": 5526 }, { "epoch": 10.197599261311172, 
"grad_norm": 0.7611707448959351, "learning_rate": 1.6572e-05, "loss": 0.1437, "step": 5527 }, { "epoch": 10.199445983379501, "grad_norm": 0.6333263516426086, "learning_rate": 1.6575e-05, "loss": 0.1312, "step": 5528 }, { "epoch": 10.20129270544783, "grad_norm": 0.619972288608551, "learning_rate": 1.6578e-05, "loss": 0.0986, "step": 5529 }, { "epoch": 10.20313942751616, "grad_norm": 0.6319988369941711, "learning_rate": 1.6581e-05, "loss": 0.0828, "step": 5530 }, { "epoch": 10.204986149584487, "grad_norm": 0.7655250430107117, "learning_rate": 1.6584e-05, "loss": 0.1116, "step": 5531 }, { "epoch": 10.206832871652816, "grad_norm": 0.5639950633049011, "learning_rate": 1.6587e-05, "loss": 0.0625, "step": 5532 }, { "epoch": 10.208679593721145, "grad_norm": 0.5196364521980286, "learning_rate": 1.6590000000000002e-05, "loss": 0.0564, "step": 5533 }, { "epoch": 10.210526315789474, "grad_norm": 0.5493685603141785, "learning_rate": 1.6593000000000002e-05, "loss": 0.048, "step": 5534 }, { "epoch": 10.212373037857802, "grad_norm": 0.6512869000434875, "learning_rate": 1.6596000000000002e-05, "loss": 0.065, "step": 5535 }, { "epoch": 10.21421975992613, "grad_norm": 0.5324305295944214, "learning_rate": 1.6599e-05, "loss": 0.0356, "step": 5536 }, { "epoch": 10.21606648199446, "grad_norm": 0.4373769164085388, "learning_rate": 1.6602e-05, "loss": 0.041, "step": 5537 }, { "epoch": 10.21791320406279, "grad_norm": 0.880097508430481, "learning_rate": 1.6605e-05, "loss": 0.0487, "step": 5538 }, { "epoch": 10.219759926131117, "grad_norm": 0.47192707657814026, "learning_rate": 1.6608e-05, "loss": 0.0328, "step": 5539 }, { "epoch": 10.221606648199446, "grad_norm": 0.4588438868522644, "learning_rate": 1.6611e-05, "loss": 0.0295, "step": 5540 }, { "epoch": 10.223453370267775, "grad_norm": 1.0013659000396729, "learning_rate": 1.6614e-05, "loss": 0.0305, "step": 5541 }, { "epoch": 10.225300092336104, "grad_norm": 0.5650994181632996, "learning_rate": 1.6617e-05, "loss": 0.0268, "step": 5542 }, { 
"epoch": 10.227146814404431, "grad_norm": 0.46585944294929504, "learning_rate": 1.6620000000000004e-05, "loss": 0.0288, "step": 5543 }, { "epoch": 10.22899353647276, "grad_norm": 0.528458833694458, "learning_rate": 1.6623e-05, "loss": 0.0294, "step": 5544 }, { "epoch": 10.23084025854109, "grad_norm": 0.4273636043071747, "learning_rate": 1.6626e-05, "loss": 0.0301, "step": 5545 }, { "epoch": 10.232686980609419, "grad_norm": 0.4764959514141083, "learning_rate": 1.6629e-05, "loss": 0.033, "step": 5546 }, { "epoch": 10.234533702677748, "grad_norm": 0.4240407943725586, "learning_rate": 1.6632e-05, "loss": 0.0642, "step": 5547 }, { "epoch": 10.236380424746075, "grad_norm": 0.4711458086967468, "learning_rate": 1.6635e-05, "loss": 0.0262, "step": 5548 }, { "epoch": 10.238227146814404, "grad_norm": 0.6241109371185303, "learning_rate": 1.6638e-05, "loss": 0.0384, "step": 5549 }, { "epoch": 10.240073868882734, "grad_norm": 0.37781938910484314, "learning_rate": 1.6641e-05, "loss": 0.0184, "step": 5550 }, { "epoch": 10.241920590951063, "grad_norm": 0.4634665548801422, "learning_rate": 1.6644e-05, "loss": 0.0156, "step": 5551 }, { "epoch": 10.24376731301939, "grad_norm": 0.5928099155426025, "learning_rate": 1.6647e-05, "loss": 0.0205, "step": 5552 }, { "epoch": 10.24561403508772, "grad_norm": 0.38055887818336487, "learning_rate": 1.665e-05, "loss": 0.022, "step": 5553 }, { "epoch": 10.247460757156048, "grad_norm": 0.8946309685707092, "learning_rate": 1.6653e-05, "loss": 0.0341, "step": 5554 }, { "epoch": 10.249307479224377, "grad_norm": 0.8753093481063843, "learning_rate": 1.6656e-05, "loss": 0.0254, "step": 5555 }, { "epoch": 10.251154201292705, "grad_norm": 0.44015374779701233, "learning_rate": 1.6659e-05, "loss": 0.0263, "step": 5556 }, { "epoch": 10.253000923361034, "grad_norm": 0.39704692363739014, "learning_rate": 1.6662e-05, "loss": 0.0269, "step": 5557 }, { "epoch": 10.254847645429363, "grad_norm": 0.4797463119029999, "learning_rate": 1.6665e-05, "loss": 0.0266, "step": 
5558 }, { "epoch": 10.256694367497692, "grad_norm": 0.4630764126777649, "learning_rate": 1.6668e-05, "loss": 0.039, "step": 5559 }, { "epoch": 10.25854108956602, "grad_norm": 1.1624733209609985, "learning_rate": 1.6671e-05, "loss": 0.0357, "step": 5560 }, { "epoch": 10.260387811634349, "grad_norm": 0.6615108251571655, "learning_rate": 1.6674e-05, "loss": 0.0284, "step": 5561 }, { "epoch": 10.262234533702678, "grad_norm": 0.5912197828292847, "learning_rate": 1.6677e-05, "loss": 0.0233, "step": 5562 }, { "epoch": 10.264081255771007, "grad_norm": 0.5102552771568298, "learning_rate": 1.6680000000000003e-05, "loss": 0.0235, "step": 5563 }, { "epoch": 10.265927977839334, "grad_norm": 0.49518391489982605, "learning_rate": 1.6683000000000003e-05, "loss": 0.0322, "step": 5564 }, { "epoch": 10.267774699907664, "grad_norm": 0.9089075922966003, "learning_rate": 1.6686000000000003e-05, "loss": 0.024, "step": 5565 }, { "epoch": 10.269621421975993, "grad_norm": 0.49683696031570435, "learning_rate": 1.6689000000000002e-05, "loss": 0.0216, "step": 5566 }, { "epoch": 10.271468144044322, "grad_norm": 0.3209707736968994, "learning_rate": 1.6692000000000002e-05, "loss": 0.0191, "step": 5567 }, { "epoch": 10.27331486611265, "grad_norm": 0.567535400390625, "learning_rate": 1.6695000000000002e-05, "loss": 0.0253, "step": 5568 }, { "epoch": 10.275161588180978, "grad_norm": 0.8037060499191284, "learning_rate": 1.6698e-05, "loss": 0.0349, "step": 5569 }, { "epoch": 10.277008310249307, "grad_norm": 0.6364205479621887, "learning_rate": 1.6700999999999998e-05, "loss": 0.0263, "step": 5570 }, { "epoch": 10.278855032317637, "grad_norm": 1.1712576150894165, "learning_rate": 1.6703999999999998e-05, "loss": 0.2833, "step": 5571 }, { "epoch": 10.280701754385966, "grad_norm": 0.941981852054596, "learning_rate": 1.6706999999999998e-05, "loss": 0.247, "step": 5572 }, { "epoch": 10.282548476454293, "grad_norm": 0.8911486864089966, "learning_rate": 1.671e-05, "loss": 0.1956, "step": 5573 }, { "epoch": 
10.284395198522622, "grad_norm": 0.7403378486633301, "learning_rate": 1.6713e-05, "loss": 0.1719, "step": 5574 }, { "epoch": 10.286241920590951, "grad_norm": 0.7996092438697815, "learning_rate": 1.6716e-05, "loss": 0.2207, "step": 5575 }, { "epoch": 10.28808864265928, "grad_norm": 0.6285210847854614, "learning_rate": 1.6719e-05, "loss": 0.1455, "step": 5576 }, { "epoch": 10.289935364727608, "grad_norm": 0.5551503896713257, "learning_rate": 1.6722e-05, "loss": 0.1133, "step": 5577 }, { "epoch": 10.291782086795937, "grad_norm": 0.8680622577667236, "learning_rate": 1.6725e-05, "loss": 0.1461, "step": 5578 }, { "epoch": 10.293628808864266, "grad_norm": 0.5695521235466003, "learning_rate": 1.6728e-05, "loss": 0.0919, "step": 5579 }, { "epoch": 10.295475530932595, "grad_norm": 0.557476818561554, "learning_rate": 1.6731e-05, "loss": 0.0961, "step": 5580 }, { "epoch": 10.297322253000923, "grad_norm": 0.6611366868019104, "learning_rate": 1.6734e-05, "loss": 0.083, "step": 5581 }, { "epoch": 10.299168975069252, "grad_norm": 0.5024586319923401, "learning_rate": 1.6737e-05, "loss": 0.0883, "step": 5582 }, { "epoch": 10.301015697137581, "grad_norm": 0.6985763311386108, "learning_rate": 1.6740000000000002e-05, "loss": 0.1743, "step": 5583 }, { "epoch": 10.30286241920591, "grad_norm": 1.2900344133377075, "learning_rate": 1.6743000000000002e-05, "loss": 0.0664, "step": 5584 }, { "epoch": 10.304709141274238, "grad_norm": 0.5589216947555542, "learning_rate": 1.6746000000000002e-05, "loss": 0.1042, "step": 5585 }, { "epoch": 10.306555863342567, "grad_norm": 0.47162994742393494, "learning_rate": 1.6749000000000002e-05, "loss": 0.0592, "step": 5586 }, { "epoch": 10.308402585410896, "grad_norm": 0.6191331148147583, "learning_rate": 1.6752e-05, "loss": 0.0937, "step": 5587 }, { "epoch": 10.310249307479225, "grad_norm": 0.3441529870033264, "learning_rate": 1.6755e-05, "loss": 0.03, "step": 5588 }, { "epoch": 10.312096029547552, "grad_norm": 0.2752731144428253, "learning_rate": 1.6758e-05, 
"loss": 0.0227, "step": 5589 }, { "epoch": 10.313942751615881, "grad_norm": 0.4862821698188782, "learning_rate": 1.6761e-05, "loss": 0.0195, "step": 5590 }, { "epoch": 10.31578947368421, "grad_norm": 0.40299078822135925, "learning_rate": 1.6764e-05, "loss": 0.0218, "step": 5591 }, { "epoch": 10.31763619575254, "grad_norm": 0.4200470745563507, "learning_rate": 1.6767e-05, "loss": 0.0201, "step": 5592 }, { "epoch": 10.319482917820867, "grad_norm": 0.7158517241477966, "learning_rate": 1.677e-05, "loss": 0.0411, "step": 5593 }, { "epoch": 10.321329639889196, "grad_norm": 0.8751426935195923, "learning_rate": 1.6773e-05, "loss": 0.025, "step": 5594 }, { "epoch": 10.323176361957525, "grad_norm": 0.5317754149436951, "learning_rate": 1.6776e-05, "loss": 0.0209, "step": 5595 }, { "epoch": 10.325023084025855, "grad_norm": 0.4970802664756775, "learning_rate": 1.6779e-05, "loss": 0.0179, "step": 5596 }, { "epoch": 10.326869806094184, "grad_norm": 0.6410852670669556, "learning_rate": 1.6782e-05, "loss": 0.0358, "step": 5597 }, { "epoch": 10.328716528162511, "grad_norm": 0.6019672155380249, "learning_rate": 1.6785e-05, "loss": 0.0274, "step": 5598 }, { "epoch": 10.33056325023084, "grad_norm": 0.484224796295166, "learning_rate": 1.6788e-05, "loss": 0.0514, "step": 5599 }, { "epoch": 10.33240997229917, "grad_norm": 0.7805466055870056, "learning_rate": 1.6791e-05, "loss": 0.019, "step": 5600 }, { "epoch": 10.334256694367498, "grad_norm": 0.4458668529987335, "learning_rate": 1.6794e-05, "loss": 0.0247, "step": 5601 }, { "epoch": 10.336103416435826, "grad_norm": 0.7102533578872681, "learning_rate": 1.6797e-05, "loss": 0.0279, "step": 5602 }, { "epoch": 10.337950138504155, "grad_norm": 0.5157403349876404, "learning_rate": 1.6800000000000002e-05, "loss": 0.0196, "step": 5603 }, { "epoch": 10.339796860572484, "grad_norm": 0.41661450266838074, "learning_rate": 1.6803e-05, "loss": 0.0266, "step": 5604 }, { "epoch": 10.341643582640813, "grad_norm": 0.47312384843826294, "learning_rate": 
1.6806e-05, "loss": 0.0152, "step": 5605 }, { "epoch": 10.34349030470914, "grad_norm": 0.9088107943534851, "learning_rate": 1.6809e-05, "loss": 0.0176, "step": 5606 }, { "epoch": 10.34533702677747, "grad_norm": 0.7505862712860107, "learning_rate": 1.6812e-05, "loss": 0.0283, "step": 5607 }, { "epoch": 10.347183748845799, "grad_norm": 0.49464118480682373, "learning_rate": 1.6815e-05, "loss": 0.0221, "step": 5608 }, { "epoch": 10.349030470914128, "grad_norm": 0.7184174656867981, "learning_rate": 1.6818e-05, "loss": 0.0328, "step": 5609 }, { "epoch": 10.350877192982455, "grad_norm": 0.5819430947303772, "learning_rate": 1.6821e-05, "loss": 0.0281, "step": 5610 }, { "epoch": 10.352723915050785, "grad_norm": 0.4279172718524933, "learning_rate": 1.6824e-05, "loss": 0.0223, "step": 5611 }, { "epoch": 10.354570637119114, "grad_norm": 0.764571487903595, "learning_rate": 1.6827e-05, "loss": 0.0269, "step": 5612 }, { "epoch": 10.356417359187443, "grad_norm": 0.6644816398620605, "learning_rate": 1.6830000000000003e-05, "loss": 0.0284, "step": 5613 }, { "epoch": 10.35826408125577, "grad_norm": 0.5781840085983276, "learning_rate": 1.6833000000000003e-05, "loss": 0.0405, "step": 5614 }, { "epoch": 10.3601108033241, "grad_norm": 0.5660284161567688, "learning_rate": 1.6836000000000003e-05, "loss": 0.0254, "step": 5615 }, { "epoch": 10.361957525392429, "grad_norm": 0.47080352902412415, "learning_rate": 1.6839000000000003e-05, "loss": 0.0246, "step": 5616 }, { "epoch": 10.363804247460758, "grad_norm": 0.5529908537864685, "learning_rate": 1.6842000000000002e-05, "loss": 0.0368, "step": 5617 }, { "epoch": 10.365650969529085, "grad_norm": 0.3346571624279022, "learning_rate": 1.6845e-05, "loss": 0.0129, "step": 5618 }, { "epoch": 10.367497691597414, "grad_norm": 0.9121596813201904, "learning_rate": 1.6848e-05, "loss": 0.0431, "step": 5619 }, { "epoch": 10.369344413665743, "grad_norm": 0.632863461971283, "learning_rate": 1.6851e-05, "loss": 0.0424, "step": 5620 }, { "epoch": 
10.371191135734072, "grad_norm": 1.32807457447052, "learning_rate": 1.6853999999999998e-05, "loss": 0.2963, "step": 5621 }, { "epoch": 10.373037857802402, "grad_norm": 0.9964393973350525, "learning_rate": 1.6856999999999998e-05, "loss": 0.2261, "step": 5622 }, { "epoch": 10.374884579870729, "grad_norm": 1.0543564558029175, "learning_rate": 1.686e-05, "loss": 0.2349, "step": 5623 }, { "epoch": 10.376731301939058, "grad_norm": 0.904670000076294, "learning_rate": 1.6863e-05, "loss": 0.2302, "step": 5624 }, { "epoch": 10.378578024007387, "grad_norm": 2.453029155731201, "learning_rate": 1.6866e-05, "loss": 0.2143, "step": 5625 }, { "epoch": 10.380424746075716, "grad_norm": 0.6642909646034241, "learning_rate": 1.6869e-05, "loss": 0.1311, "step": 5626 }, { "epoch": 10.382271468144044, "grad_norm": 0.669899582862854, "learning_rate": 1.6872e-05, "loss": 0.1325, "step": 5627 }, { "epoch": 10.384118190212373, "grad_norm": 0.9939351677894592, "learning_rate": 1.6875e-05, "loss": 0.1419, "step": 5628 }, { "epoch": 10.385964912280702, "grad_norm": 0.9026162624359131, "learning_rate": 1.6878e-05, "loss": 0.0871, "step": 5629 }, { "epoch": 10.387811634349031, "grad_norm": 0.716627299785614, "learning_rate": 1.6881e-05, "loss": 0.0975, "step": 5630 }, { "epoch": 10.389658356417359, "grad_norm": 0.48917216062545776, "learning_rate": 1.6884e-05, "loss": 0.0757, "step": 5631 }, { "epoch": 10.391505078485688, "grad_norm": 0.6990576982498169, "learning_rate": 1.6887e-05, "loss": 0.1063, "step": 5632 }, { "epoch": 10.393351800554017, "grad_norm": 0.3678179681301117, "learning_rate": 1.689e-05, "loss": 0.0446, "step": 5633 }, { "epoch": 10.395198522622346, "grad_norm": 0.44908279180526733, "learning_rate": 1.6893000000000002e-05, "loss": 0.0445, "step": 5634 }, { "epoch": 10.397045244690673, "grad_norm": 0.5445517897605896, "learning_rate": 1.6896000000000002e-05, "loss": 0.0554, "step": 5635 }, { "epoch": 10.398891966759003, "grad_norm": 0.5841280817985535, "learning_rate": 
1.6899000000000002e-05, "loss": 0.0216, "step": 5636 }, { "epoch": 10.400738688827332, "grad_norm": 0.38346558809280396, "learning_rate": 1.6902000000000002e-05, "loss": 0.0285, "step": 5637 }, { "epoch": 10.40258541089566, "grad_norm": 0.5853798389434814, "learning_rate": 1.6905e-05, "loss": 0.0246, "step": 5638 }, { "epoch": 10.404432132963988, "grad_norm": 0.5012631416320801, "learning_rate": 1.6908e-05, "loss": 0.0289, "step": 5639 }, { "epoch": 10.406278855032317, "grad_norm": 0.5409550666809082, "learning_rate": 1.6911e-05, "loss": 0.0317, "step": 5640 }, { "epoch": 10.408125577100646, "grad_norm": 0.390082448720932, "learning_rate": 1.6914e-05, "loss": 0.0208, "step": 5641 }, { "epoch": 10.409972299168976, "grad_norm": 0.6462604999542236, "learning_rate": 1.6916999999999997e-05, "loss": 0.0282, "step": 5642 }, { "epoch": 10.411819021237303, "grad_norm": 0.5268653631210327, "learning_rate": 1.6919999999999997e-05, "loss": 0.0282, "step": 5643 }, { "epoch": 10.413665743305632, "grad_norm": 0.41686591506004333, "learning_rate": 1.6923e-05, "loss": 0.0189, "step": 5644 }, { "epoch": 10.415512465373961, "grad_norm": 0.4445785582065582, "learning_rate": 1.6926e-05, "loss": 0.0317, "step": 5645 }, { "epoch": 10.41735918744229, "grad_norm": 1.0424392223358154, "learning_rate": 1.6929e-05, "loss": 0.0307, "step": 5646 }, { "epoch": 10.41920590951062, "grad_norm": 0.38552019000053406, "learning_rate": 1.6932e-05, "loss": 0.0336, "step": 5647 }, { "epoch": 10.421052631578947, "grad_norm": 0.5564932823181152, "learning_rate": 1.6935e-05, "loss": 0.0234, "step": 5648 }, { "epoch": 10.422899353647276, "grad_norm": 0.45230332016944885, "learning_rate": 1.6938e-05, "loss": 0.0286, "step": 5649 }, { "epoch": 10.424746075715605, "grad_norm": 0.5506929755210876, "learning_rate": 1.6941e-05, "loss": 0.035, "step": 5650 }, { "epoch": 10.426592797783934, "grad_norm": 0.39401277899742126, "learning_rate": 1.6944e-05, "loss": 0.0451, "step": 5651 }, { "epoch": 10.428439519852262, 
"grad_norm": 0.4682321846485138, "learning_rate": 1.6947e-05, "loss": 0.0223, "step": 5652 }, { "epoch": 10.43028624192059, "grad_norm": 0.4107759892940521, "learning_rate": 1.695e-05, "loss": 0.0194, "step": 5653 }, { "epoch": 10.43213296398892, "grad_norm": 0.38645249605178833, "learning_rate": 1.6953000000000002e-05, "loss": 0.0196, "step": 5654 }, { "epoch": 10.43397968605725, "grad_norm": 0.48322468996047974, "learning_rate": 1.6956e-05, "loss": 0.0163, "step": 5655 }, { "epoch": 10.435826408125576, "grad_norm": 0.5782933235168457, "learning_rate": 1.6959e-05, "loss": 0.0313, "step": 5656 }, { "epoch": 10.437673130193906, "grad_norm": 0.5378473997116089, "learning_rate": 1.6962e-05, "loss": 0.04, "step": 5657 }, { "epoch": 10.439519852262235, "grad_norm": 0.4453236162662506, "learning_rate": 1.6965e-05, "loss": 0.0222, "step": 5658 }, { "epoch": 10.441366574330564, "grad_norm": 0.6218374967575073, "learning_rate": 1.6968e-05, "loss": 0.0322, "step": 5659 }, { "epoch": 10.443213296398891, "grad_norm": 0.6248608231544495, "learning_rate": 1.6971e-05, "loss": 0.0274, "step": 5660 }, { "epoch": 10.44506001846722, "grad_norm": 0.4168800413608551, "learning_rate": 1.6974e-05, "loss": 0.0258, "step": 5661 }, { "epoch": 10.44690674053555, "grad_norm": 0.3333655297756195, "learning_rate": 1.6977e-05, "loss": 0.0237, "step": 5662 }, { "epoch": 10.448753462603879, "grad_norm": 0.28354814648628235, "learning_rate": 1.698e-05, "loss": 0.0163, "step": 5663 }, { "epoch": 10.450600184672206, "grad_norm": 0.5972402095794678, "learning_rate": 1.6983000000000003e-05, "loss": 0.033, "step": 5664 }, { "epoch": 10.452446906740535, "grad_norm": 0.3524409830570221, "learning_rate": 1.6986000000000003e-05, "loss": 0.0125, "step": 5665 }, { "epoch": 10.454293628808864, "grad_norm": 0.46399515867233276, "learning_rate": 1.6989000000000003e-05, "loss": 0.0194, "step": 5666 }, { "epoch": 10.456140350877194, "grad_norm": 0.5713216066360474, "learning_rate": 1.6992e-05, "loss": 0.0223, 
"step": 5667 }, { "epoch": 10.45798707294552, "grad_norm": 0.5732083916664124, "learning_rate": 1.6995e-05, "loss": 0.018, "step": 5668 }, { "epoch": 10.45983379501385, "grad_norm": 0.5675196647644043, "learning_rate": 1.6998e-05, "loss": 0.0243, "step": 5669 }, { "epoch": 10.46168051708218, "grad_norm": 0.6661143898963928, "learning_rate": 1.7001e-05, "loss": 0.0647, "step": 5670 }, { "epoch": 10.463527239150508, "grad_norm": 0.9836185574531555, "learning_rate": 1.7004e-05, "loss": 0.2418, "step": 5671 }, { "epoch": 10.465373961218837, "grad_norm": 0.9940314888954163, "learning_rate": 1.7006999999999998e-05, "loss": 0.2224, "step": 5672 }, { "epoch": 10.467220683287165, "grad_norm": 0.9100486636161804, "learning_rate": 1.7009999999999998e-05, "loss": 0.2137, "step": 5673 }, { "epoch": 10.469067405355494, "grad_norm": 0.7420975565910339, "learning_rate": 1.7013e-05, "loss": 0.1624, "step": 5674 }, { "epoch": 10.470914127423823, "grad_norm": 0.7102513909339905, "learning_rate": 1.7016e-05, "loss": 0.1962, "step": 5675 }, { "epoch": 10.472760849492152, "grad_norm": 0.7101590633392334, "learning_rate": 1.7019e-05, "loss": 0.1239, "step": 5676 }, { "epoch": 10.47460757156048, "grad_norm": 0.8599352240562439, "learning_rate": 1.7022e-05, "loss": 0.1271, "step": 5677 }, { "epoch": 10.476454293628809, "grad_norm": 0.9525429010391235, "learning_rate": 1.7025e-05, "loss": 0.1764, "step": 5678 }, { "epoch": 10.478301015697138, "grad_norm": 0.6291274428367615, "learning_rate": 1.7028e-05, "loss": 0.0883, "step": 5679 }, { "epoch": 10.480147737765467, "grad_norm": 1.0020326375961304, "learning_rate": 1.7031e-05, "loss": 0.1492, "step": 5680 }, { "epoch": 10.481994459833794, "grad_norm": 0.7298393249511719, "learning_rate": 1.7034e-05, "loss": 0.1428, "step": 5681 }, { "epoch": 10.483841181902124, "grad_norm": 0.6903399229049683, "learning_rate": 1.7037e-05, "loss": 0.1012, "step": 5682 }, { "epoch": 10.485687903970453, "grad_norm": 0.5499876141548157, "learning_rate": 
1.704e-05, "loss": 0.0598, "step": 5683 }, { "epoch": 10.487534626038782, "grad_norm": 0.886182963848114, "learning_rate": 1.7043000000000003e-05, "loss": 0.1237, "step": 5684 }, { "epoch": 10.48938134810711, "grad_norm": 0.6885615587234497, "learning_rate": 1.7046000000000002e-05, "loss": 0.088, "step": 5685 }, { "epoch": 10.491228070175438, "grad_norm": 0.5300734639167786, "learning_rate": 1.7049000000000002e-05, "loss": 0.0481, "step": 5686 }, { "epoch": 10.493074792243767, "grad_norm": 0.6662781834602356, "learning_rate": 1.7052000000000002e-05, "loss": 0.0389, "step": 5687 }, { "epoch": 10.494921514312097, "grad_norm": 0.5047277808189392, "learning_rate": 1.7055000000000002e-05, "loss": 0.0163, "step": 5688 }, { "epoch": 10.496768236380424, "grad_norm": 0.5463379621505737, "learning_rate": 1.7058e-05, "loss": 0.04, "step": 5689 }, { "epoch": 10.498614958448753, "grad_norm": 0.5748884677886963, "learning_rate": 1.7061e-05, "loss": 0.0391, "step": 5690 }, { "epoch": 10.500461680517082, "grad_norm": 0.5658259987831116, "learning_rate": 1.7064e-05, "loss": 0.0291, "step": 5691 }, { "epoch": 10.502308402585411, "grad_norm": 0.36395278573036194, "learning_rate": 1.7066999999999998e-05, "loss": 0.0276, "step": 5692 }, { "epoch": 10.504155124653739, "grad_norm": 0.5200968980789185, "learning_rate": 1.7069999999999998e-05, "loss": 0.0344, "step": 5693 }, { "epoch": 10.506001846722068, "grad_norm": 0.41553211212158203, "learning_rate": 1.7073e-05, "loss": 0.0196, "step": 5694 }, { "epoch": 10.507848568790397, "grad_norm": 0.5262542366981506, "learning_rate": 1.7076e-05, "loss": 0.0177, "step": 5695 }, { "epoch": 10.509695290858726, "grad_norm": 0.4412112832069397, "learning_rate": 1.7079e-05, "loss": 0.025, "step": 5696 }, { "epoch": 10.511542012927055, "grad_norm": 0.37101516127586365, "learning_rate": 1.7082e-05, "loss": 0.0235, "step": 5697 }, { "epoch": 10.513388734995383, "grad_norm": 0.401090145111084, "learning_rate": 1.7085e-05, "loss": 0.023, "step": 5698 }, { 
"epoch": 10.515235457063712, "grad_norm": 0.46183472871780396, "learning_rate": 1.7088e-05, "loss": 0.0154, "step": 5699 }, { "epoch": 10.517082179132041, "grad_norm": 0.45999205112457275, "learning_rate": 1.7091e-05, "loss": 0.0305, "step": 5700 }, { "epoch": 10.51892890120037, "grad_norm": 0.444741815328598, "learning_rate": 1.7094e-05, "loss": 0.0275, "step": 5701 }, { "epoch": 10.520775623268698, "grad_norm": 0.4720512926578522, "learning_rate": 1.7097e-05, "loss": 0.0363, "step": 5702 }, { "epoch": 10.522622345337027, "grad_norm": 0.4139270782470703, "learning_rate": 1.71e-05, "loss": 0.0308, "step": 5703 }, { "epoch": 10.524469067405356, "grad_norm": 0.404547780752182, "learning_rate": 1.7103000000000002e-05, "loss": 0.0192, "step": 5704 }, { "epoch": 10.526315789473685, "grad_norm": 0.3558710813522339, "learning_rate": 1.7106000000000002e-05, "loss": 0.0198, "step": 5705 }, { "epoch": 10.528162511542012, "grad_norm": 0.8504428863525391, "learning_rate": 1.7109000000000002e-05, "loss": 0.0144, "step": 5706 }, { "epoch": 10.530009233610341, "grad_norm": 0.5197940468788147, "learning_rate": 1.7112e-05, "loss": 0.0244, "step": 5707 }, { "epoch": 10.53185595567867, "grad_norm": 0.5138944983482361, "learning_rate": 1.7115e-05, "loss": 0.0306, "step": 5708 }, { "epoch": 10.533702677747, "grad_norm": 0.530799925327301, "learning_rate": 1.7118e-05, "loss": 0.0253, "step": 5709 }, { "epoch": 10.535549399815327, "grad_norm": 0.751099705696106, "learning_rate": 1.7121e-05, "loss": 0.0442, "step": 5710 }, { "epoch": 10.537396121883656, "grad_norm": 0.4729883670806885, "learning_rate": 1.7124e-05, "loss": 0.0351, "step": 5711 }, { "epoch": 10.539242843951985, "grad_norm": 0.3551481068134308, "learning_rate": 1.7127e-05, "loss": 0.013, "step": 5712 }, { "epoch": 10.541089566020315, "grad_norm": 0.38660553097724915, "learning_rate": 1.713e-05, "loss": 0.0188, "step": 5713 }, { "epoch": 10.542936288088642, "grad_norm": 0.3219395577907562, "learning_rate": 
1.7133000000000004e-05, "loss": 0.0166, "step": 5714 }, { "epoch": 10.544783010156971, "grad_norm": 0.4016413688659668, "learning_rate": 1.7136000000000003e-05, "loss": 0.0287, "step": 5715 }, { "epoch": 10.5466297322253, "grad_norm": 0.5673303008079529, "learning_rate": 1.7139e-05, "loss": 0.0269, "step": 5716 }, { "epoch": 10.54847645429363, "grad_norm": 0.6439066529273987, "learning_rate": 1.7142e-05, "loss": 0.0156, "step": 5717 }, { "epoch": 10.550323176361957, "grad_norm": 0.7035136818885803, "learning_rate": 1.7145e-05, "loss": 0.0264, "step": 5718 }, { "epoch": 10.552169898430286, "grad_norm": 0.5405717492103577, "learning_rate": 1.7148e-05, "loss": 0.0269, "step": 5719 }, { "epoch": 10.554016620498615, "grad_norm": 0.7340378761291504, "learning_rate": 1.7151e-05, "loss": 0.0369, "step": 5720 }, { "epoch": 10.555863342566944, "grad_norm": 0.8675962686538696, "learning_rate": 1.7154e-05, "loss": 0.2132, "step": 5721 }, { "epoch": 10.557710064635273, "grad_norm": 0.6791530847549438, "learning_rate": 1.7157e-05, "loss": 0.2311, "step": 5722 }, { "epoch": 10.5595567867036, "grad_norm": 0.6052650213241577, "learning_rate": 1.716e-05, "loss": 0.1708, "step": 5723 }, { "epoch": 10.56140350877193, "grad_norm": 0.9645447134971619, "learning_rate": 1.7163e-05, "loss": 0.1836, "step": 5724 }, { "epoch": 10.563250230840259, "grad_norm": 1.0505342483520508, "learning_rate": 1.7166e-05, "loss": 0.1674, "step": 5725 }, { "epoch": 10.565096952908588, "grad_norm": 0.829861581325531, "learning_rate": 1.7169e-05, "loss": 0.1579, "step": 5726 }, { "epoch": 10.566943674976915, "grad_norm": 0.6052542328834534, "learning_rate": 1.7172e-05, "loss": 0.1572, "step": 5727 }, { "epoch": 10.568790397045245, "grad_norm": 0.6950606107711792, "learning_rate": 1.7175e-05, "loss": 0.1262, "step": 5728 }, { "epoch": 10.570637119113574, "grad_norm": 1.024465799331665, "learning_rate": 1.7178e-05, "loss": 0.1287, "step": 5729 }, { "epoch": 10.572483841181903, "grad_norm": 0.4541202485561371, 
"learning_rate": 1.7181e-05, "loss": 0.0765, "step": 5730 }, { "epoch": 10.57433056325023, "grad_norm": 0.7202557325363159, "learning_rate": 1.7184e-05, "loss": 0.0876, "step": 5731 }, { "epoch": 10.57617728531856, "grad_norm": 0.8201732635498047, "learning_rate": 1.7187e-05, "loss": 0.1163, "step": 5732 }, { "epoch": 10.578024007386889, "grad_norm": 1.2063844203948975, "learning_rate": 1.719e-05, "loss": 0.0757, "step": 5733 }, { "epoch": 10.579870729455218, "grad_norm": 0.510230541229248, "learning_rate": 1.7193000000000003e-05, "loss": 0.0441, "step": 5734 }, { "epoch": 10.581717451523545, "grad_norm": 0.6762166619300842, "learning_rate": 1.7196000000000003e-05, "loss": 0.1082, "step": 5735 }, { "epoch": 10.583564173591874, "grad_norm": 0.4172094762325287, "learning_rate": 1.7199000000000003e-05, "loss": 0.0368, "step": 5736 }, { "epoch": 10.585410895660203, "grad_norm": 0.3209916949272156, "learning_rate": 1.7202000000000002e-05, "loss": 0.0304, "step": 5737 }, { "epoch": 10.587257617728532, "grad_norm": 0.4255470335483551, "learning_rate": 1.7205000000000002e-05, "loss": 0.0551, "step": 5738 }, { "epoch": 10.58910433979686, "grad_norm": 0.3540935814380646, "learning_rate": 1.7208000000000002e-05, "loss": 0.0178, "step": 5739 }, { "epoch": 10.590951061865189, "grad_norm": 0.44914117455482483, "learning_rate": 1.7211000000000002e-05, "loss": 0.0627, "step": 5740 }, { "epoch": 10.592797783933518, "grad_norm": 0.4884379208087921, "learning_rate": 1.7213999999999998e-05, "loss": 0.0196, "step": 5741 }, { "epoch": 10.594644506001847, "grad_norm": 0.2642185091972351, "learning_rate": 1.7216999999999998e-05, "loss": 0.0188, "step": 5742 }, { "epoch": 10.596491228070175, "grad_norm": 0.42987197637557983, "learning_rate": 1.7219999999999998e-05, "loss": 0.0245, "step": 5743 }, { "epoch": 10.598337950138504, "grad_norm": 0.3514634966850281, "learning_rate": 1.7223e-05, "loss": 0.0217, "step": 5744 }, { "epoch": 10.600184672206833, "grad_norm": 0.24382612109184265, 
"learning_rate": 1.7226e-05, "loss": 0.01, "step": 5745 }, { "epoch": 10.602031394275162, "grad_norm": 0.6637547016143799, "learning_rate": 1.7229e-05, "loss": 0.0287, "step": 5746 }, { "epoch": 10.603878116343491, "grad_norm": 0.6304849982261658, "learning_rate": 1.7232e-05, "loss": 0.0218, "step": 5747 }, { "epoch": 10.605724838411819, "grad_norm": 0.5097060203552246, "learning_rate": 1.7235e-05, "loss": 0.0198, "step": 5748 }, { "epoch": 10.607571560480148, "grad_norm": 0.3933012783527374, "learning_rate": 1.7238e-05, "loss": 0.0127, "step": 5749 }, { "epoch": 10.609418282548477, "grad_norm": 0.3572508692741394, "learning_rate": 1.7241e-05, "loss": 0.0583, "step": 5750 }, { "epoch": 10.611265004616806, "grad_norm": 0.5863717198371887, "learning_rate": 1.7244e-05, "loss": 0.0528, "step": 5751 }, { "epoch": 10.613111726685133, "grad_norm": 0.44540056586265564, "learning_rate": 1.7247e-05, "loss": 0.0233, "step": 5752 }, { "epoch": 10.614958448753463, "grad_norm": 0.4500695765018463, "learning_rate": 1.725e-05, "loss": 0.0226, "step": 5753 }, { "epoch": 10.616805170821792, "grad_norm": 0.9237388968467712, "learning_rate": 1.7253e-05, "loss": 0.0319, "step": 5754 }, { "epoch": 10.61865189289012, "grad_norm": 0.5861974954605103, "learning_rate": 1.7256000000000002e-05, "loss": 0.0345, "step": 5755 }, { "epoch": 10.620498614958448, "grad_norm": 0.4686151444911957, "learning_rate": 1.7259000000000002e-05, "loss": 0.0279, "step": 5756 }, { "epoch": 10.622345337026777, "grad_norm": 0.40479278564453125, "learning_rate": 1.7262000000000002e-05, "loss": 0.0149, "step": 5757 }, { "epoch": 10.624192059095106, "grad_norm": 0.5644612312316895, "learning_rate": 1.7265e-05, "loss": 0.0185, "step": 5758 }, { "epoch": 10.626038781163436, "grad_norm": 0.7194874882698059, "learning_rate": 1.7268e-05, "loss": 0.029, "step": 5759 }, { "epoch": 10.627885503231763, "grad_norm": 0.32438820600509644, "learning_rate": 1.7271e-05, "loss": 0.0162, "step": 5760 }, { "epoch": 
10.629732225300092, "grad_norm": 0.47052034735679626, "learning_rate": 1.7274e-05, "loss": 0.0193, "step": 5761 }, { "epoch": 10.631578947368421, "grad_norm": 1.1280709505081177, "learning_rate": 1.7277e-05, "loss": 0.035, "step": 5762 }, { "epoch": 10.63342566943675, "grad_norm": 0.469835489988327, "learning_rate": 1.728e-05, "loss": 0.0168, "step": 5763 }, { "epoch": 10.635272391505078, "grad_norm": 0.5156711339950562, "learning_rate": 1.7283e-05, "loss": 0.0178, "step": 5764 }, { "epoch": 10.637119113573407, "grad_norm": 0.5281715989112854, "learning_rate": 1.7286e-05, "loss": 0.0275, "step": 5765 }, { "epoch": 10.638965835641736, "grad_norm": 0.45756691694259644, "learning_rate": 1.7289e-05, "loss": 0.025, "step": 5766 }, { "epoch": 10.640812557710065, "grad_norm": 0.4730209708213806, "learning_rate": 1.7292e-05, "loss": 0.026, "step": 5767 }, { "epoch": 10.642659279778393, "grad_norm": 1.0519452095031738, "learning_rate": 1.7295e-05, "loss": 0.0384, "step": 5768 }, { "epoch": 10.644506001846722, "grad_norm": 0.6030709743499756, "learning_rate": 1.7298e-05, "loss": 0.0318, "step": 5769 }, { "epoch": 10.64635272391505, "grad_norm": 1.0561301708221436, "learning_rate": 1.7301e-05, "loss": 0.0237, "step": 5770 }, { "epoch": 10.64819944598338, "grad_norm": 0.9371124505996704, "learning_rate": 1.7304e-05, "loss": 0.2677, "step": 5771 }, { "epoch": 10.65004616805171, "grad_norm": 0.6510640382766724, "learning_rate": 1.7307e-05, "loss": 0.1784, "step": 5772 }, { "epoch": 10.651892890120036, "grad_norm": 0.9221227765083313, "learning_rate": 1.731e-05, "loss": 0.197, "step": 5773 }, { "epoch": 10.653739612188366, "grad_norm": 0.7787302136421204, "learning_rate": 1.7313e-05, "loss": 0.1623, "step": 5774 }, { "epoch": 10.655586334256695, "grad_norm": 0.6928793787956238, "learning_rate": 1.7316e-05, "loss": 0.1452, "step": 5775 }, { "epoch": 10.657433056325024, "grad_norm": 0.580687403678894, "learning_rate": 1.7319e-05, "loss": 0.1221, "step": 5776 }, { "epoch": 
10.659279778393351, "grad_norm": 0.8013032674789429, "learning_rate": 1.7322e-05, "loss": 0.1252, "step": 5777 }, { "epoch": 10.66112650046168, "grad_norm": 0.755961000919342, "learning_rate": 1.7325e-05, "loss": 0.1432, "step": 5778 }, { "epoch": 10.66297322253001, "grad_norm": 1.1269781589508057, "learning_rate": 1.7328e-05, "loss": 0.142, "step": 5779 }, { "epoch": 10.664819944598339, "grad_norm": 0.8808785676956177, "learning_rate": 1.7331e-05, "loss": 0.1661, "step": 5780 }, { "epoch": 10.666666666666666, "grad_norm": 0.7470980286598206, "learning_rate": 1.7334e-05, "loss": 0.0968, "step": 5781 }, { "epoch": 10.668513388734995, "grad_norm": 0.519422173500061, "learning_rate": 1.7337e-05, "loss": 0.0657, "step": 5782 }, { "epoch": 10.670360110803324, "grad_norm": 0.6593102812767029, "learning_rate": 1.734e-05, "loss": 0.1161, "step": 5783 }, { "epoch": 10.672206832871654, "grad_norm": 0.48402902483940125, "learning_rate": 1.7343e-05, "loss": 0.0666, "step": 5784 }, { "epoch": 10.67405355493998, "grad_norm": 0.3188153803348541, "learning_rate": 1.7346000000000003e-05, "loss": 0.0356, "step": 5785 }, { "epoch": 10.67590027700831, "grad_norm": 0.5182211995124817, "learning_rate": 1.7349000000000003e-05, "loss": 0.0434, "step": 5786 }, { "epoch": 10.67774699907664, "grad_norm": 0.5140071511268616, "learning_rate": 1.7352000000000003e-05, "loss": 0.0403, "step": 5787 }, { "epoch": 10.679593721144968, "grad_norm": 0.4483383297920227, "learning_rate": 1.7355000000000002e-05, "loss": 0.0437, "step": 5788 }, { "epoch": 10.681440443213296, "grad_norm": 1.0645132064819336, "learning_rate": 1.7358000000000002e-05, "loss": 0.0577, "step": 5789 }, { "epoch": 10.683287165281625, "grad_norm": 0.7384029626846313, "learning_rate": 1.7361e-05, "loss": 0.0997, "step": 5790 }, { "epoch": 10.685133887349954, "grad_norm": 0.4034633934497833, "learning_rate": 1.7364e-05, "loss": 0.0212, "step": 5791 }, { "epoch": 10.686980609418283, "grad_norm": 0.2344970703125, "learning_rate": 
1.7366999999999998e-05, "loss": 0.0175, "step": 5792 }, { "epoch": 10.68882733148661, "grad_norm": 0.49585863947868347, "learning_rate": 1.7369999999999998e-05, "loss": 0.0317, "step": 5793 }, { "epoch": 10.69067405355494, "grad_norm": 0.5580799579620361, "learning_rate": 1.7372999999999998e-05, "loss": 0.0279, "step": 5794 }, { "epoch": 10.692520775623269, "grad_norm": 0.3624635934829712, "learning_rate": 1.7376e-05, "loss": 0.0176, "step": 5795 }, { "epoch": 10.694367497691598, "grad_norm": 0.5163046717643738, "learning_rate": 1.7379e-05, "loss": 0.0277, "step": 5796 }, { "epoch": 10.696214219759927, "grad_norm": 0.467898964881897, "learning_rate": 1.7382e-05, "loss": 0.0254, "step": 5797 }, { "epoch": 10.698060941828254, "grad_norm": 0.3116570711135864, "learning_rate": 1.7385e-05, "loss": 0.0142, "step": 5798 }, { "epoch": 10.699907663896584, "grad_norm": 0.5890852808952332, "learning_rate": 1.7388e-05, "loss": 0.0153, "step": 5799 }, { "epoch": 10.701754385964913, "grad_norm": 0.37547069787979126, "learning_rate": 1.7391e-05, "loss": 0.0155, "step": 5800 }, { "epoch": 10.703601108033242, "grad_norm": 0.5420143008232117, "learning_rate": 1.7394e-05, "loss": 0.0314, "step": 5801 }, { "epoch": 10.70544783010157, "grad_norm": 0.5886558294296265, "learning_rate": 1.7397e-05, "loss": 0.0313, "step": 5802 }, { "epoch": 10.707294552169898, "grad_norm": 0.6461412310600281, "learning_rate": 1.74e-05, "loss": 0.0356, "step": 5803 }, { "epoch": 10.709141274238227, "grad_norm": 0.4910597801208496, "learning_rate": 1.7403e-05, "loss": 0.0198, "step": 5804 }, { "epoch": 10.710987996306557, "grad_norm": 0.2973109483718872, "learning_rate": 1.7406000000000002e-05, "loss": 0.0152, "step": 5805 }, { "epoch": 10.712834718374884, "grad_norm": 0.40196624398231506, "learning_rate": 1.7409000000000002e-05, "loss": 0.0179, "step": 5806 }, { "epoch": 10.714681440443213, "grad_norm": 0.4059551954269409, "learning_rate": 1.7412000000000002e-05, "loss": 0.0191, "step": 5807 }, { "epoch": 
10.716528162511542, "grad_norm": 0.8953748941421509, "learning_rate": 1.7415000000000002e-05, "loss": 0.0298, "step": 5808 }, { "epoch": 10.718374884579871, "grad_norm": 0.9182073473930359, "learning_rate": 1.7418e-05, "loss": 0.0306, "step": 5809 }, { "epoch": 10.720221606648199, "grad_norm": 0.5914534330368042, "learning_rate": 1.7421e-05, "loss": 0.0303, "step": 5810 }, { "epoch": 10.722068328716528, "grad_norm": 0.5903384685516357, "learning_rate": 1.7424e-05, "loss": 0.0245, "step": 5811 }, { "epoch": 10.723915050784857, "grad_norm": 0.49383530020713806, "learning_rate": 1.7427e-05, "loss": 0.0261, "step": 5812 }, { "epoch": 10.725761772853186, "grad_norm": 0.5673595666885376, "learning_rate": 1.743e-05, "loss": 0.0287, "step": 5813 }, { "epoch": 10.727608494921514, "grad_norm": 0.6683375835418701, "learning_rate": 1.7432999999999997e-05, "loss": 0.0309, "step": 5814 }, { "epoch": 10.729455216989843, "grad_norm": 0.34646183252334595, "learning_rate": 1.7436e-05, "loss": 0.014, "step": 5815 }, { "epoch": 10.731301939058172, "grad_norm": 0.599746584892273, "learning_rate": 1.7439e-05, "loss": 0.0249, "step": 5816 }, { "epoch": 10.733148661126501, "grad_norm": 0.7076974511146545, "learning_rate": 1.7442e-05, "loss": 0.037, "step": 5817 }, { "epoch": 10.734995383194828, "grad_norm": 0.9104524850845337, "learning_rate": 1.7445e-05, "loss": 0.0311, "step": 5818 }, { "epoch": 10.736842105263158, "grad_norm": 0.5769959092140198, "learning_rate": 1.7448e-05, "loss": 0.0355, "step": 5819 }, { "epoch": 10.738688827331487, "grad_norm": 0.5935896039009094, "learning_rate": 1.7451e-05, "loss": 0.021, "step": 5820 }, { "epoch": 10.740535549399816, "grad_norm": 1.0296519994735718, "learning_rate": 1.7454e-05, "loss": 0.2168, "step": 5821 }, { "epoch": 10.742382271468145, "grad_norm": 0.8563361167907715, "learning_rate": 1.7457e-05, "loss": 0.2389, "step": 5822 }, { "epoch": 10.744228993536472, "grad_norm": 0.8937773704528809, "learning_rate": 1.746e-05, "loss": 0.2175, 
"step": 5823 }, { "epoch": 10.746075715604801, "grad_norm": 0.9618704915046692, "learning_rate": 1.7463e-05, "loss": 0.148, "step": 5824 }, { "epoch": 10.74792243767313, "grad_norm": 0.9022472500801086, "learning_rate": 1.7466000000000002e-05, "loss": 0.2013, "step": 5825 }, { "epoch": 10.749769159741458, "grad_norm": 0.6519712209701538, "learning_rate": 1.7469e-05, "loss": 0.1351, "step": 5826 }, { "epoch": 10.751615881809787, "grad_norm": 0.5046953558921814, "learning_rate": 1.7472e-05, "loss": 0.1077, "step": 5827 }, { "epoch": 10.753462603878116, "grad_norm": 1.0729660987854004, "learning_rate": 1.7475e-05, "loss": 0.1374, "step": 5828 }, { "epoch": 10.755309325946445, "grad_norm": 0.735659658908844, "learning_rate": 1.7478e-05, "loss": 0.1447, "step": 5829 }, { "epoch": 10.757156048014775, "grad_norm": 0.8504224419593811, "learning_rate": 1.7481e-05, "loss": 0.1169, "step": 5830 }, { "epoch": 10.759002770083102, "grad_norm": 0.7671719789505005, "learning_rate": 1.7484e-05, "loss": 0.0831, "step": 5831 }, { "epoch": 10.760849492151431, "grad_norm": 0.3581356108188629, "learning_rate": 1.7487e-05, "loss": 0.043, "step": 5832 }, { "epoch": 10.76269621421976, "grad_norm": 0.5683116316795349, "learning_rate": 1.749e-05, "loss": 0.1233, "step": 5833 }, { "epoch": 10.76454293628809, "grad_norm": 0.4286747872829437, "learning_rate": 1.7493e-05, "loss": 0.0561, "step": 5834 }, { "epoch": 10.766389658356417, "grad_norm": 0.7286791801452637, "learning_rate": 1.7496000000000003e-05, "loss": 0.0465, "step": 5835 }, { "epoch": 10.768236380424746, "grad_norm": 0.6198620796203613, "learning_rate": 1.7499000000000003e-05, "loss": 0.0647, "step": 5836 }, { "epoch": 10.770083102493075, "grad_norm": 0.39466071128845215, "learning_rate": 1.7502000000000003e-05, "loss": 0.0338, "step": 5837 }, { "epoch": 10.771929824561404, "grad_norm": 0.5323166251182556, "learning_rate": 1.7505000000000003e-05, "loss": 0.0309, "step": 5838 }, { "epoch": 10.773776546629731, "grad_norm": 
0.6481252312660217, "learning_rate": 1.7508e-05, "loss": 0.0314, "step": 5839 }, { "epoch": 10.77562326869806, "grad_norm": 0.623769223690033, "learning_rate": 1.7511e-05, "loss": 0.0316, "step": 5840 }, { "epoch": 10.77746999076639, "grad_norm": 0.6676685810089111, "learning_rate": 1.7514e-05, "loss": 0.0421, "step": 5841 }, { "epoch": 10.779316712834719, "grad_norm": 0.5272042155265808, "learning_rate": 1.7517e-05, "loss": 0.0426, "step": 5842 }, { "epoch": 10.781163434903046, "grad_norm": 0.6411356925964355, "learning_rate": 1.7519999999999998e-05, "loss": 0.0367, "step": 5843 }, { "epoch": 10.783010156971375, "grad_norm": 0.7479405999183655, "learning_rate": 1.7522999999999998e-05, "loss": 0.02, "step": 5844 }, { "epoch": 10.784856879039705, "grad_norm": 0.37660306692123413, "learning_rate": 1.7526e-05, "loss": 0.0288, "step": 5845 }, { "epoch": 10.786703601108034, "grad_norm": 0.9735251665115356, "learning_rate": 1.7529e-05, "loss": 0.0539, "step": 5846 }, { "epoch": 10.788550323176363, "grad_norm": 0.6767516136169434, "learning_rate": 1.7532e-05, "loss": 0.0234, "step": 5847 }, { "epoch": 10.79039704524469, "grad_norm": 0.42775028944015503, "learning_rate": 1.7535e-05, "loss": 0.0186, "step": 5848 }, { "epoch": 10.79224376731302, "grad_norm": 0.3126576542854309, "learning_rate": 1.7538e-05, "loss": 0.0109, "step": 5849 }, { "epoch": 10.794090489381349, "grad_norm": 0.41386958956718445, "learning_rate": 1.7541e-05, "loss": 0.029, "step": 5850 }, { "epoch": 10.795937211449676, "grad_norm": 0.33370286226272583, "learning_rate": 1.7544e-05, "loss": 0.0178, "step": 5851 }, { "epoch": 10.797783933518005, "grad_norm": 0.6642718315124512, "learning_rate": 1.7547e-05, "loss": 0.0294, "step": 5852 }, { "epoch": 10.799630655586334, "grad_norm": 0.6200350522994995, "learning_rate": 1.755e-05, "loss": 0.0254, "step": 5853 }, { "epoch": 10.801477377654663, "grad_norm": 1.2748810052871704, "learning_rate": 1.7553e-05, "loss": 0.0452, "step": 5854 }, { "epoch": 
10.803324099722992, "grad_norm": 0.811354398727417, "learning_rate": 1.7556000000000003e-05, "loss": 0.0254, "step": 5855 }, { "epoch": 10.80517082179132, "grad_norm": 0.5516785979270935, "learning_rate": 1.7559000000000002e-05, "loss": 0.0271, "step": 5856 }, { "epoch": 10.807017543859649, "grad_norm": 0.5956955552101135, "learning_rate": 1.7562000000000002e-05, "loss": 0.0197, "step": 5857 }, { "epoch": 10.808864265927978, "grad_norm": 0.531806468963623, "learning_rate": 1.7565000000000002e-05, "loss": 0.0289, "step": 5858 }, { "epoch": 10.810710987996307, "grad_norm": 0.5544119477272034, "learning_rate": 1.7568000000000002e-05, "loss": 0.0229, "step": 5859 }, { "epoch": 10.812557710064635, "grad_norm": 0.528141975402832, "learning_rate": 1.7571e-05, "loss": 0.0207, "step": 5860 }, { "epoch": 10.814404432132964, "grad_norm": 0.5564522743225098, "learning_rate": 1.7574e-05, "loss": 0.0293, "step": 5861 }, { "epoch": 10.816251154201293, "grad_norm": 0.3546724319458008, "learning_rate": 1.7577e-05, "loss": 0.0113, "step": 5862 }, { "epoch": 10.818097876269622, "grad_norm": 0.46226778626441956, "learning_rate": 1.758e-05, "loss": 0.0212, "step": 5863 }, { "epoch": 10.81994459833795, "grad_norm": 0.38217854499816895, "learning_rate": 1.7582999999999998e-05, "loss": 0.0236, "step": 5864 }, { "epoch": 10.821791320406279, "grad_norm": 0.6104041934013367, "learning_rate": 1.7586e-05, "loss": 0.0207, "step": 5865 }, { "epoch": 10.823638042474608, "grad_norm": 0.4689196050167084, "learning_rate": 1.7589e-05, "loss": 0.0197, "step": 5866 }, { "epoch": 10.825484764542937, "grad_norm": 0.4690294563770294, "learning_rate": 1.7592e-05, "loss": 0.0218, "step": 5867 }, { "epoch": 10.827331486611264, "grad_norm": 0.8422415852546692, "learning_rate": 1.7595e-05, "loss": 0.0335, "step": 5868 }, { "epoch": 10.829178208679593, "grad_norm": 0.9045748114585876, "learning_rate": 1.7598e-05, "loss": 0.0321, "step": 5869 }, { "epoch": 10.831024930747922, "grad_norm": 1.0264004468917847, 
"learning_rate": 1.7601e-05, "loss": 0.0389, "step": 5870 }, { "epoch": 10.832871652816252, "grad_norm": 1.2848174571990967, "learning_rate": 1.7604e-05, "loss": 0.2561, "step": 5871 }, { "epoch": 10.83471837488458, "grad_norm": 1.1149464845657349, "learning_rate": 1.7607e-05, "loss": 0.2667, "step": 5872 }, { "epoch": 10.836565096952908, "grad_norm": 1.0092140436172485, "learning_rate": 1.761e-05, "loss": 0.2504, "step": 5873 }, { "epoch": 10.838411819021237, "grad_norm": 0.6515405774116516, "learning_rate": 1.7613e-05, "loss": 0.153, "step": 5874 }, { "epoch": 10.840258541089566, "grad_norm": 0.7586976885795593, "learning_rate": 1.7616000000000002e-05, "loss": 0.2003, "step": 5875 }, { "epoch": 10.842105263157894, "grad_norm": 0.5531761646270752, "learning_rate": 1.7619000000000002e-05, "loss": 0.1495, "step": 5876 }, { "epoch": 10.843951985226223, "grad_norm": 0.696130633354187, "learning_rate": 1.7622000000000002e-05, "loss": 0.127, "step": 5877 }, { "epoch": 10.845798707294552, "grad_norm": 0.8173731565475464, "learning_rate": 1.7625e-05, "loss": 0.1449, "step": 5878 }, { "epoch": 10.847645429362881, "grad_norm": 1.2938200235366821, "learning_rate": 1.7628e-05, "loss": 0.1448, "step": 5879 }, { "epoch": 10.84949215143121, "grad_norm": 0.8170304894447327, "learning_rate": 1.7631e-05, "loss": 0.0966, "step": 5880 }, { "epoch": 10.851338873499538, "grad_norm": 0.8002492189407349, "learning_rate": 1.7634e-05, "loss": 0.0673, "step": 5881 }, { "epoch": 10.853185595567867, "grad_norm": 0.7990866899490356, "learning_rate": 1.7637e-05, "loss": 0.0818, "step": 5882 }, { "epoch": 10.855032317636196, "grad_norm": 0.5102277994155884, "learning_rate": 1.764e-05, "loss": 0.0492, "step": 5883 }, { "epoch": 10.856879039704525, "grad_norm": 1.3296751976013184, "learning_rate": 1.7643e-05, "loss": 0.0644, "step": 5884 }, { "epoch": 10.858725761772853, "grad_norm": 0.59538733959198, "learning_rate": 1.7646e-05, "loss": 0.05, "step": 5885 }, { "epoch": 10.860572483841182, 
"grad_norm": 0.4024272561073303, "learning_rate": 1.7649000000000003e-05, "loss": 0.0357, "step": 5886 }, { "epoch": 10.86241920590951, "grad_norm": 0.5509514212608337, "learning_rate": 1.7652000000000003e-05, "loss": 0.0313, "step": 5887 }, { "epoch": 10.86426592797784, "grad_norm": 0.6461734771728516, "learning_rate": 1.7655e-05, "loss": 0.049, "step": 5888 }, { "epoch": 10.866112650046167, "grad_norm": 0.7171344757080078, "learning_rate": 1.7658e-05, "loss": 0.032, "step": 5889 }, { "epoch": 10.867959372114496, "grad_norm": 0.5926627516746521, "learning_rate": 1.7661e-05, "loss": 0.0275, "step": 5890 }, { "epoch": 10.869806094182826, "grad_norm": 0.38975149393081665, "learning_rate": 1.7664e-05, "loss": 0.0204, "step": 5891 }, { "epoch": 10.871652816251155, "grad_norm": 0.32457953691482544, "learning_rate": 1.7667e-05, "loss": 0.0259, "step": 5892 }, { "epoch": 10.873499538319482, "grad_norm": 0.29443931579589844, "learning_rate": 1.767e-05, "loss": 0.0175, "step": 5893 }, { "epoch": 10.875346260387811, "grad_norm": 0.36705857515335083, "learning_rate": 1.7673e-05, "loss": 0.0233, "step": 5894 }, { "epoch": 10.87719298245614, "grad_norm": 0.4953431487083435, "learning_rate": 1.7675999999999998e-05, "loss": 0.0372, "step": 5895 }, { "epoch": 10.87903970452447, "grad_norm": 0.5245679616928101, "learning_rate": 1.7679e-05, "loss": 0.0296, "step": 5896 }, { "epoch": 10.880886426592799, "grad_norm": 0.3879951536655426, "learning_rate": 1.7682e-05, "loss": 0.0238, "step": 5897 }, { "epoch": 10.882733148661126, "grad_norm": 0.8937625885009766, "learning_rate": 1.7685e-05, "loss": 0.0178, "step": 5898 }, { "epoch": 10.884579870729455, "grad_norm": 0.47701773047447205, "learning_rate": 1.7688e-05, "loss": 0.0272, "step": 5899 }, { "epoch": 10.886426592797784, "grad_norm": 0.32832854986190796, "learning_rate": 1.7691e-05, "loss": 0.0151, "step": 5900 }, { "epoch": 10.888273314866112, "grad_norm": 0.4349646270275116, "learning_rate": 1.7694e-05, "loss": 0.032, "step": 5901 
}, { "epoch": 10.89012003693444, "grad_norm": 0.47184181213378906, "learning_rate": 1.7697e-05, "loss": 0.0194, "step": 5902 }, { "epoch": 10.89196675900277, "grad_norm": 0.29045549035072327, "learning_rate": 1.77e-05, "loss": 0.0117, "step": 5903 }, { "epoch": 10.8938134810711, "grad_norm": 0.3673584759235382, "learning_rate": 1.7703e-05, "loss": 0.0145, "step": 5904 }, { "epoch": 10.895660203139428, "grad_norm": 0.41271263360977173, "learning_rate": 1.7706e-05, "loss": 0.0216, "step": 5905 }, { "epoch": 10.897506925207756, "grad_norm": 0.38137903809547424, "learning_rate": 1.7709000000000003e-05, "loss": 0.0228, "step": 5906 }, { "epoch": 10.899353647276085, "grad_norm": 0.46189069747924805, "learning_rate": 1.7712000000000003e-05, "loss": 0.0269, "step": 5907 }, { "epoch": 10.901200369344414, "grad_norm": 0.9427136182785034, "learning_rate": 1.7715000000000002e-05, "loss": 0.0381, "step": 5908 }, { "epoch": 10.903047091412743, "grad_norm": 0.7765113115310669, "learning_rate": 1.7718000000000002e-05, "loss": 0.0273, "step": 5909 }, { "epoch": 10.90489381348107, "grad_norm": 0.5198538899421692, "learning_rate": 1.7721000000000002e-05, "loss": 0.0262, "step": 5910 }, { "epoch": 10.9067405355494, "grad_norm": 0.4149373471736908, "learning_rate": 1.7724000000000002e-05, "loss": 0.0241, "step": 5911 }, { "epoch": 10.908587257617729, "grad_norm": 0.39832204580307007, "learning_rate": 1.7727e-05, "loss": 0.0174, "step": 5912 }, { "epoch": 10.910433979686058, "grad_norm": 0.46863245964050293, "learning_rate": 1.7729999999999998e-05, "loss": 0.0162, "step": 5913 }, { "epoch": 10.912280701754385, "grad_norm": 0.5959219932556152, "learning_rate": 1.7732999999999998e-05, "loss": 0.0229, "step": 5914 }, { "epoch": 10.914127423822714, "grad_norm": 0.626492977142334, "learning_rate": 1.7735999999999998e-05, "loss": 0.0334, "step": 5915 }, { "epoch": 10.915974145891044, "grad_norm": 0.35012662410736084, "learning_rate": 1.7739e-05, "loss": 0.0142, "step": 5916 }, { "epoch": 
10.917820867959373, "grad_norm": 0.5902463793754578, "learning_rate": 1.7742e-05, "loss": 0.0281, "step": 5917 }, { "epoch": 10.9196675900277, "grad_norm": 0.5554072856903076, "learning_rate": 1.7745e-05, "loss": 0.017, "step": 5918 }, { "epoch": 10.92151431209603, "grad_norm": 0.5523650646209717, "learning_rate": 1.7748e-05, "loss": 0.0333, "step": 5919 }, { "epoch": 10.923361034164358, "grad_norm": 0.5115478038787842, "learning_rate": 1.7751e-05, "loss": 0.0231, "step": 5920 }, { "epoch": 10.925207756232687, "grad_norm": 0.8140024542808533, "learning_rate": 1.7754e-05, "loss": 0.2674, "step": 5921 }, { "epoch": 10.927054478301017, "grad_norm": 1.0150563716888428, "learning_rate": 1.7757e-05, "loss": 0.1972, "step": 5922 }, { "epoch": 10.928901200369344, "grad_norm": 0.8180074095726013, "learning_rate": 1.776e-05, "loss": 0.1416, "step": 5923 }, { "epoch": 10.930747922437673, "grad_norm": 0.6263332962989807, "learning_rate": 1.7763e-05, "loss": 0.154, "step": 5924 }, { "epoch": 10.932594644506002, "grad_norm": 0.7633740901947021, "learning_rate": 1.7766e-05, "loss": 0.1867, "step": 5925 }, { "epoch": 10.93444136657433, "grad_norm": 0.7075972557067871, "learning_rate": 1.7769000000000002e-05, "loss": 0.1033, "step": 5926 }, { "epoch": 10.936288088642659, "grad_norm": 0.9612365961074829, "learning_rate": 1.7772000000000002e-05, "loss": 0.1497, "step": 5927 }, { "epoch": 10.938134810710988, "grad_norm": 0.5302220582962036, "learning_rate": 1.7775000000000002e-05, "loss": 0.0802, "step": 5928 }, { "epoch": 10.939981532779317, "grad_norm": 0.7335618734359741, "learning_rate": 1.7778e-05, "loss": 0.1157, "step": 5929 }, { "epoch": 10.941828254847646, "grad_norm": 0.5661278963088989, "learning_rate": 1.7781e-05, "loss": 0.0775, "step": 5930 }, { "epoch": 10.943674976915974, "grad_norm": 0.580007016658783, "learning_rate": 1.7784e-05, "loss": 0.0852, "step": 5931 }, { "epoch": 10.945521698984303, "grad_norm": 0.7283288836479187, "learning_rate": 1.7787e-05, "loss": 
0.0515, "step": 5932 }, { "epoch": 10.947368421052632, "grad_norm": 0.47408390045166016, "learning_rate": 1.779e-05, "loss": 0.0598, "step": 5933 }, { "epoch": 10.949215143120961, "grad_norm": 0.4382071793079376, "learning_rate": 1.7793e-05, "loss": 0.0297, "step": 5934 }, { "epoch": 10.951061865189288, "grad_norm": 0.4971558153629303, "learning_rate": 1.7796e-05, "loss": 0.0495, "step": 5935 }, { "epoch": 10.952908587257618, "grad_norm": 1.1129035949707031, "learning_rate": 1.7799000000000004e-05, "loss": 0.043, "step": 5936 }, { "epoch": 10.954755309325947, "grad_norm": 0.4687868654727936, "learning_rate": 1.7802e-05, "loss": 0.0481, "step": 5937 }, { "epoch": 10.956602031394276, "grad_norm": 0.611916184425354, "learning_rate": 1.7805e-05, "loss": 0.0697, "step": 5938 }, { "epoch": 10.958448753462603, "grad_norm": 0.5590572357177734, "learning_rate": 1.7808e-05, "loss": 0.0174, "step": 5939 }, { "epoch": 10.960295475530932, "grad_norm": 0.5645524263381958, "learning_rate": 1.7811e-05, "loss": 0.0227, "step": 5940 }, { "epoch": 10.962142197599261, "grad_norm": 0.5157508254051208, "learning_rate": 1.7814e-05, "loss": 0.0246, "step": 5941 }, { "epoch": 10.96398891966759, "grad_norm": 0.4864080250263214, "learning_rate": 1.7817e-05, "loss": 0.0357, "step": 5942 }, { "epoch": 10.965835641735918, "grad_norm": 0.3735795021057129, "learning_rate": 1.782e-05, "loss": 0.0199, "step": 5943 }, { "epoch": 10.967682363804247, "grad_norm": 0.5223242044448853, "learning_rate": 1.7823e-05, "loss": 0.0257, "step": 5944 }, { "epoch": 10.969529085872576, "grad_norm": 0.3293151259422302, "learning_rate": 1.7826e-05, "loss": 0.0186, "step": 5945 }, { "epoch": 10.971375807940905, "grad_norm": 0.6055980920791626, "learning_rate": 1.7829e-05, "loss": 0.0302, "step": 5946 }, { "epoch": 10.973222530009235, "grad_norm": 0.3997729420661926, "learning_rate": 1.7832e-05, "loss": 0.0225, "step": 5947 }, { "epoch": 10.975069252077562, "grad_norm": 0.49927785992622375, "learning_rate": 
1.7835e-05, "loss": 0.025, "step": 5948 }, { "epoch": 10.976915974145891, "grad_norm": 0.8552719950675964, "learning_rate": 1.7838e-05, "loss": 0.037, "step": 5949 }, { "epoch": 10.97876269621422, "grad_norm": 0.7387699484825134, "learning_rate": 1.7841e-05, "loss": 0.0343, "step": 5950 }, { "epoch": 10.980609418282548, "grad_norm": 0.6182773113250732, "learning_rate": 1.7844e-05, "loss": 0.0288, "step": 5951 }, { "epoch": 10.982456140350877, "grad_norm": 0.6504404544830322, "learning_rate": 1.7847e-05, "loss": 0.0281, "step": 5952 }, { "epoch": 10.984302862419206, "grad_norm": 0.4910828173160553, "learning_rate": 1.785e-05, "loss": 0.0247, "step": 5953 }, { "epoch": 10.986149584487535, "grad_norm": 0.4561193585395813, "learning_rate": 1.7853e-05, "loss": 0.0296, "step": 5954 }, { "epoch": 10.987996306555864, "grad_norm": 0.7066456079483032, "learning_rate": 1.7856e-05, "loss": 0.0344, "step": 5955 }, { "epoch": 10.989843028624191, "grad_norm": 0.5314071178436279, "learning_rate": 1.7859000000000003e-05, "loss": 0.0296, "step": 5956 }, { "epoch": 10.99168975069252, "grad_norm": 0.5812799334526062, "learning_rate": 1.7862000000000003e-05, "loss": 0.0211, "step": 5957 }, { "epoch": 10.99353647276085, "grad_norm": 0.2950487732887268, "learning_rate": 1.7865000000000003e-05, "loss": 0.0136, "step": 5958 }, { "epoch": 10.995383194829179, "grad_norm": 1.0167781114578247, "learning_rate": 1.7868000000000002e-05, "loss": 0.041, "step": 5959 }, { "epoch": 10.997229916897506, "grad_norm": 0.41852226853370667, "learning_rate": 1.7871000000000002e-05, "loss": 0.0189, "step": 5960 }, { "epoch": 10.999076638965835, "grad_norm": 0.6753515005111694, "learning_rate": 1.7874000000000002e-05, "loss": 0.0239, "step": 5961 }, { "epoch": 11.0, "grad_norm": 0.3744104504585266, "learning_rate": 1.7877e-05, "loss": 0.0189, "step": 5962 }, { "epoch": 11.00184672206833, "grad_norm": 0.7426442503929138, "learning_rate": 1.7879999999999998e-05, "loss": 0.2933, "step": 5963 }, { "epoch": 
11.003693444136658, "grad_norm": 0.6798139810562134, "learning_rate": 1.7882999999999998e-05, "loss": 0.1877, "step": 5964 }, { "epoch": 11.005540166204986, "grad_norm": 0.5603246688842773, "learning_rate": 1.7885999999999998e-05, "loss": 0.2133, "step": 5965 }, { "epoch": 11.007386888273315, "grad_norm": 0.8107830286026001, "learning_rate": 1.7889e-05, "loss": 0.1866, "step": 5966 }, { "epoch": 11.009233610341644, "grad_norm": 0.7005266547203064, "learning_rate": 1.7892e-05, "loss": 0.1359, "step": 5967 }, { "epoch": 11.011080332409973, "grad_norm": 0.8084649443626404, "learning_rate": 1.7895e-05, "loss": 0.1772, "step": 5968 }, { "epoch": 11.0129270544783, "grad_norm": 0.8491911292076111, "learning_rate": 1.7898e-05, "loss": 0.1294, "step": 5969 }, { "epoch": 11.01477377654663, "grad_norm": 0.7015768885612488, "learning_rate": 1.7901e-05, "loss": 0.1238, "step": 5970 }, { "epoch": 11.016620498614959, "grad_norm": 0.7217311263084412, "learning_rate": 1.7904e-05, "loss": 0.088, "step": 5971 }, { "epoch": 11.018467220683288, "grad_norm": 0.6328301429748535, "learning_rate": 1.7907e-05, "loss": 0.0878, "step": 5972 }, { "epoch": 11.020313942751615, "grad_norm": 0.6157311201095581, "learning_rate": 1.791e-05, "loss": 0.0828, "step": 5973 }, { "epoch": 11.022160664819944, "grad_norm": 0.5200340747833252, "learning_rate": 1.7913e-05, "loss": 0.0616, "step": 5974 }, { "epoch": 11.024007386888274, "grad_norm": 1.4208979606628418, "learning_rate": 1.7916e-05, "loss": 0.188, "step": 5975 }, { "epoch": 11.025854108956603, "grad_norm": 0.47305992245674133, "learning_rate": 1.7919000000000002e-05, "loss": 0.0323, "step": 5976 }, { "epoch": 11.02770083102493, "grad_norm": 0.46868613362312317, "learning_rate": 1.7922000000000002e-05, "loss": 0.044, "step": 5977 }, { "epoch": 11.02954755309326, "grad_norm": 0.8581869006156921, "learning_rate": 1.7925000000000002e-05, "loss": 0.0702, "step": 5978 }, { "epoch": 11.031394275161588, "grad_norm": 0.5283058285713196, "learning_rate": 
1.7928000000000002e-05, "loss": 0.0424, "step": 5979 }, { "epoch": 11.033240997229917, "grad_norm": 0.5160033106803894, "learning_rate": 1.7931e-05, "loss": 0.0484, "step": 5980 }, { "epoch": 11.035087719298245, "grad_norm": 0.8869902491569519, "learning_rate": 1.7934e-05, "loss": 0.0201, "step": 5981 }, { "epoch": 11.036934441366574, "grad_norm": 0.5846513509750366, "learning_rate": 1.7937e-05, "loss": 0.0361, "step": 5982 }, { "epoch": 11.038781163434903, "grad_norm": 0.3070891797542572, "learning_rate": 1.794e-05, "loss": 0.018, "step": 5983 }, { "epoch": 11.040627885503232, "grad_norm": 0.6062940359115601, "learning_rate": 1.7943e-05, "loss": 0.0414, "step": 5984 }, { "epoch": 11.04247460757156, "grad_norm": 0.3984304964542389, "learning_rate": 1.7946e-05, "loss": 0.0183, "step": 5985 }, { "epoch": 11.044321329639889, "grad_norm": 0.4931993782520294, "learning_rate": 1.7949e-05, "loss": 0.0151, "step": 5986 }, { "epoch": 11.046168051708218, "grad_norm": 0.6660001873970032, "learning_rate": 1.7952e-05, "loss": 0.0291, "step": 5987 }, { "epoch": 11.048014773776547, "grad_norm": 0.6292256116867065, "learning_rate": 1.7955e-05, "loss": 0.0191, "step": 5988 }, { "epoch": 11.049861495844876, "grad_norm": 0.4108259975910187, "learning_rate": 1.7958e-05, "loss": 0.0171, "step": 5989 }, { "epoch": 11.051708217913204, "grad_norm": 0.4854716658592224, "learning_rate": 1.7961e-05, "loss": 0.0188, "step": 5990 }, { "epoch": 11.053554939981533, "grad_norm": 0.9647286534309387, "learning_rate": 1.7964e-05, "loss": 0.0191, "step": 5991 }, { "epoch": 11.055401662049862, "grad_norm": 0.733239471912384, "learning_rate": 1.7967e-05, "loss": 0.0599, "step": 5992 }, { "epoch": 11.057248384118191, "grad_norm": 0.3580063283443451, "learning_rate": 1.797e-05, "loss": 0.0164, "step": 5993 }, { "epoch": 11.059095106186518, "grad_norm": 0.40893301367759705, "learning_rate": 1.7973e-05, "loss": 0.0144, "step": 5994 }, { "epoch": 11.060941828254848, "grad_norm": 0.3047679364681244, 
"learning_rate": 1.7976e-05, "loss": 0.0136, "step": 5995 }, { "epoch": 11.062788550323177, "grad_norm": 0.45307955145835876, "learning_rate": 1.7979000000000002e-05, "loss": 0.0204, "step": 5996 }, { "epoch": 11.064635272391506, "grad_norm": 0.8502277135848999, "learning_rate": 1.7982e-05, "loss": 0.0208, "step": 5997 }, { "epoch": 11.066481994459833, "grad_norm": 0.6238176822662354, "learning_rate": 1.7985e-05, "loss": 0.0282, "step": 5998 }, { "epoch": 11.068328716528162, "grad_norm": 0.42590227723121643, "learning_rate": 1.7988e-05, "loss": 0.0182, "step": 5999 }, { "epoch": 11.070175438596491, "grad_norm": 0.554565966129303, "learning_rate": 1.7991e-05, "loss": 0.0299, "step": 6000 }, { "epoch": 11.070175438596491, "eval_cer": 0.11734971615211136, "eval_loss": 0.328770250082016, "eval_runtime": 16.1595, "eval_samples_per_second": 18.812, "eval_steps_per_second": 0.619, "eval_wer": 0.41596316193399846, "step": 6000 }, { "epoch": 11.07202216066482, "grad_norm": 0.7954382300376892, "learning_rate": 1.7994e-05, "loss": 0.0324, "step": 6001 }, { "epoch": 11.073868882733148, "grad_norm": 0.3789304494857788, "learning_rate": 1.7997e-05, "loss": 0.0172, "step": 6002 }, { "epoch": 11.075715604801477, "grad_norm": 0.36132848262786865, "learning_rate": 1.8e-05, "loss": 0.0156, "step": 6003 }, { "epoch": 11.077562326869806, "grad_norm": 0.7410117387771606, "learning_rate": 1.8003e-05, "loss": 0.0261, "step": 6004 }, { "epoch": 11.079409048938135, "grad_norm": 0.46199312806129456, "learning_rate": 1.8006e-05, "loss": 0.0183, "step": 6005 }, { "epoch": 11.081255771006463, "grad_norm": 0.5138466358184814, "learning_rate": 1.8009e-05, "loss": 0.0241, "step": 6006 }, { "epoch": 11.083102493074792, "grad_norm": 0.7309773564338684, "learning_rate": 1.8012000000000003e-05, "loss": 0.0251, "step": 6007 }, { "epoch": 11.084949215143121, "grad_norm": 0.40339091420173645, "learning_rate": 1.8015000000000003e-05, "loss": 0.014, "step": 6008 }, { "epoch": 11.08679593721145, 
"grad_norm": 0.7696073055267334, "learning_rate": 1.8018000000000003e-05, "loss": 0.0278, "step": 6009 }, { "epoch": 11.088642659279778, "grad_norm": 0.9116730690002441, "learning_rate": 1.8021000000000002e-05, "loss": 0.0336, "step": 6010 }, { "epoch": 11.090489381348107, "grad_norm": 1.1889814138412476, "learning_rate": 1.8024e-05, "loss": 0.0413, "step": 6011 }, { "epoch": 11.092336103416436, "grad_norm": 1.3130958080291748, "learning_rate": 1.8027e-05, "loss": 0.0312, "step": 6012 }, { "epoch": 11.094182825484765, "grad_norm": 0.7441951036453247, "learning_rate": 1.803e-05, "loss": 0.2037, "step": 6013 }, { "epoch": 11.096029547553094, "grad_norm": 0.6344949007034302, "learning_rate": 1.8032999999999998e-05, "loss": 0.1776, "step": 6014 }, { "epoch": 11.097876269621421, "grad_norm": 0.8395406007766724, "learning_rate": 1.8035999999999998e-05, "loss": 0.236, "step": 6015 }, { "epoch": 11.09972299168975, "grad_norm": 0.7685767412185669, "learning_rate": 1.8038999999999998e-05, "loss": 0.1972, "step": 6016 }, { "epoch": 11.10156971375808, "grad_norm": 0.8736318945884705, "learning_rate": 1.8042e-05, "loss": 0.2188, "step": 6017 }, { "epoch": 11.103416435826409, "grad_norm": 0.5640191435813904, "learning_rate": 1.8045e-05, "loss": 0.0933, "step": 6018 }, { "epoch": 11.105263157894736, "grad_norm": 0.521339476108551, "learning_rate": 1.8048e-05, "loss": 0.0808, "step": 6019 }, { "epoch": 11.107109879963065, "grad_norm": 0.6846213936805725, "learning_rate": 1.8051e-05, "loss": 0.1305, "step": 6020 }, { "epoch": 11.108956602031395, "grad_norm": 0.7579799890518188, "learning_rate": 1.8054e-05, "loss": 0.0898, "step": 6021 }, { "epoch": 11.110803324099724, "grad_norm": 0.5198628306388855, "learning_rate": 1.8057e-05, "loss": 0.0999, "step": 6022 }, { "epoch": 11.112650046168051, "grad_norm": 0.5691999793052673, "learning_rate": 1.806e-05, "loss": 0.0805, "step": 6023 }, { "epoch": 11.11449676823638, "grad_norm": 0.5658872723579407, "learning_rate": 1.8063e-05, "loss": 
0.0517, "step": 6024 }, { "epoch": 11.11634349030471, "grad_norm": 0.47111114859580994, "learning_rate": 1.8066e-05, "loss": 0.0653, "step": 6025 }, { "epoch": 11.118190212373039, "grad_norm": 0.5160207748413086, "learning_rate": 1.8069e-05, "loss": 0.0678, "step": 6026 }, { "epoch": 11.120036934441366, "grad_norm": 0.5169432163238525, "learning_rate": 1.8072000000000002e-05, "loss": 0.0425, "step": 6027 }, { "epoch": 11.121883656509695, "grad_norm": 0.3853415548801422, "learning_rate": 1.8075000000000002e-05, "loss": 0.0303, "step": 6028 }, { "epoch": 11.123730378578024, "grad_norm": 0.4425393044948578, "learning_rate": 1.8078000000000002e-05, "loss": 0.0147, "step": 6029 }, { "epoch": 11.125577100646353, "grad_norm": 0.46627745032310486, "learning_rate": 1.8081000000000002e-05, "loss": 0.0306, "step": 6030 }, { "epoch": 11.12742382271468, "grad_norm": 0.4932944178581238, "learning_rate": 1.8084e-05, "loss": 0.0487, "step": 6031 }, { "epoch": 11.12927054478301, "grad_norm": 0.43220511078834534, "learning_rate": 1.8087e-05, "loss": 0.01, "step": 6032 }, { "epoch": 11.131117266851339, "grad_norm": 1.317031741142273, "learning_rate": 1.809e-05, "loss": 0.0253, "step": 6033 }, { "epoch": 11.132963988919668, "grad_norm": 0.2857363820075989, "learning_rate": 1.8093e-05, "loss": 0.0114, "step": 6034 }, { "epoch": 11.134810710987995, "grad_norm": 1.058005690574646, "learning_rate": 1.8096e-05, "loss": 0.0329, "step": 6035 }, { "epoch": 11.136657433056325, "grad_norm": 0.6060735583305359, "learning_rate": 1.8098999999999997e-05, "loss": 0.0236, "step": 6036 }, { "epoch": 11.138504155124654, "grad_norm": 0.4277653694152832, "learning_rate": 1.8102e-05, "loss": 0.0267, "step": 6037 }, { "epoch": 11.140350877192983, "grad_norm": 0.42562127113342285, "learning_rate": 1.8105e-05, "loss": 0.0375, "step": 6038 }, { "epoch": 11.142197599261312, "grad_norm": 0.4103083312511444, "learning_rate": 1.8108e-05, "loss": 0.0179, "step": 6039 }, { "epoch": 11.14404432132964, "grad_norm": 
0.5249040722846985, "learning_rate": 1.8111e-05, "loss": 0.0222, "step": 6040 }, { "epoch": 11.145891043397969, "grad_norm": 0.3144191801548004, "learning_rate": 1.8114e-05, "loss": 0.0161, "step": 6041 }, { "epoch": 11.147737765466298, "grad_norm": 0.9872508645057678, "learning_rate": 1.8117e-05, "loss": 0.0518, "step": 6042 }, { "epoch": 11.149584487534627, "grad_norm": 0.44013455510139465, "learning_rate": 1.812e-05, "loss": 0.0265, "step": 6043 }, { "epoch": 11.151431209602954, "grad_norm": 0.4432675540447235, "learning_rate": 1.8123e-05, "loss": 0.0194, "step": 6044 }, { "epoch": 11.153277931671283, "grad_norm": 0.3006099462509155, "learning_rate": 1.8126e-05, "loss": 0.0187, "step": 6045 }, { "epoch": 11.155124653739612, "grad_norm": 0.3285147547721863, "learning_rate": 1.8129e-05, "loss": 0.0195, "step": 6046 }, { "epoch": 11.156971375807942, "grad_norm": 0.29914382100105286, "learning_rate": 1.8132000000000002e-05, "loss": 0.0152, "step": 6047 }, { "epoch": 11.158818097876269, "grad_norm": 1.1159223318099976, "learning_rate": 1.8135000000000002e-05, "loss": 0.0245, "step": 6048 }, { "epoch": 11.160664819944598, "grad_norm": 0.45235589146614075, "learning_rate": 1.8138e-05, "loss": 0.0177, "step": 6049 }, { "epoch": 11.162511542012927, "grad_norm": 0.5673660039901733, "learning_rate": 1.8141e-05, "loss": 0.027, "step": 6050 }, { "epoch": 11.164358264081256, "grad_norm": 0.4398684799671173, "learning_rate": 1.8144e-05, "loss": 0.021, "step": 6051 }, { "epoch": 11.166204986149584, "grad_norm": 0.9551856517791748, "learning_rate": 1.8147e-05, "loss": 0.0465, "step": 6052 }, { "epoch": 11.168051708217913, "grad_norm": 0.7103539705276489, "learning_rate": 1.815e-05, "loss": 0.0261, "step": 6053 }, { "epoch": 11.169898430286242, "grad_norm": 1.8120602369308472, "learning_rate": 1.8153e-05, "loss": 0.0183, "step": 6054 }, { "epoch": 11.171745152354571, "grad_norm": 0.4405721127986908, "learning_rate": 1.8156e-05, "loss": 0.0201, "step": 6055 }, { "epoch": 
11.173591874422899, "grad_norm": 0.4213368892669678, "learning_rate": 1.8159e-05, "loss": 0.0172, "step": 6056 }, { "epoch": 11.175438596491228, "grad_norm": 0.4763179123401642, "learning_rate": 1.8162000000000003e-05, "loss": 0.0106, "step": 6057 }, { "epoch": 11.177285318559557, "grad_norm": 0.33773693442344666, "learning_rate": 1.8165000000000003e-05, "loss": 0.0176, "step": 6058 }, { "epoch": 11.179132040627886, "grad_norm": 0.7333492636680603, "learning_rate": 1.8168000000000003e-05, "loss": 0.0281, "step": 6059 }, { "epoch": 11.180978762696213, "grad_norm": 0.3161907494068146, "learning_rate": 1.8171e-05, "loss": 0.0119, "step": 6060 }, { "epoch": 11.182825484764543, "grad_norm": 0.7705785632133484, "learning_rate": 1.8174e-05, "loss": 0.024, "step": 6061 }, { "epoch": 11.184672206832872, "grad_norm": 0.6240018606185913, "learning_rate": 1.8177e-05, "loss": 0.0143, "step": 6062 }, { "epoch": 11.1865189289012, "grad_norm": 1.3212826251983643, "learning_rate": 1.818e-05, "loss": 0.3074, "step": 6063 }, { "epoch": 11.18836565096953, "grad_norm": 0.6692222356796265, "learning_rate": 1.8183e-05, "loss": 0.1793, "step": 6064 }, { "epoch": 11.190212373037857, "grad_norm": 0.6634356379508972, "learning_rate": 1.8186e-05, "loss": 0.14, "step": 6065 }, { "epoch": 11.192059095106186, "grad_norm": 0.6485190987586975, "learning_rate": 1.8188999999999998e-05, "loss": 0.1288, "step": 6066 }, { "epoch": 11.193905817174516, "grad_norm": 0.7990904450416565, "learning_rate": 1.8192e-05, "loss": 0.1893, "step": 6067 }, { "epoch": 11.195752539242845, "grad_norm": 0.7302971482276917, "learning_rate": 1.8195e-05, "loss": 0.1558, "step": 6068 }, { "epoch": 11.197599261311172, "grad_norm": 0.6865899562835693, "learning_rate": 1.8198e-05, "loss": 0.1101, "step": 6069 }, { "epoch": 11.199445983379501, "grad_norm": 0.6729971766471863, "learning_rate": 1.8201e-05, "loss": 0.1269, "step": 6070 }, { "epoch": 11.20129270544783, "grad_norm": 0.725504457950592, "learning_rate": 1.8204e-05, 
"loss": 0.13, "step": 6071 }, { "epoch": 11.20313942751616, "grad_norm": 0.5052322149276733, "learning_rate": 1.8207e-05, "loss": 0.0995, "step": 6072 }, { "epoch": 11.204986149584487, "grad_norm": 0.8587621450424194, "learning_rate": 1.821e-05, "loss": 0.1422, "step": 6073 }, { "epoch": 11.206832871652816, "grad_norm": 0.4450884461402893, "learning_rate": 1.8213e-05, "loss": 0.0913, "step": 6074 }, { "epoch": 11.208679593721145, "grad_norm": 0.6525385975837708, "learning_rate": 1.8216e-05, "loss": 0.0399, "step": 6075 }, { "epoch": 11.210526315789474, "grad_norm": 0.4793529510498047, "learning_rate": 1.8219e-05, "loss": 0.0398, "step": 6076 }, { "epoch": 11.212373037857802, "grad_norm": 0.6193368434906006, "learning_rate": 1.8222000000000003e-05, "loss": 0.0483, "step": 6077 }, { "epoch": 11.21421975992613, "grad_norm": 0.2656480669975281, "learning_rate": 1.8225000000000003e-05, "loss": 0.0243, "step": 6078 }, { "epoch": 11.21606648199446, "grad_norm": 0.3633423447608948, "learning_rate": 1.8228000000000002e-05, "loss": 0.0286, "step": 6079 }, { "epoch": 11.21791320406279, "grad_norm": 0.4554925858974457, "learning_rate": 1.8231000000000002e-05, "loss": 0.0353, "step": 6080 }, { "epoch": 11.219759926131117, "grad_norm": 0.3139352798461914, "learning_rate": 1.8234000000000002e-05, "loss": 0.0155, "step": 6081 }, { "epoch": 11.221606648199446, "grad_norm": 0.5080939531326294, "learning_rate": 1.8237000000000002e-05, "loss": 0.0253, "step": 6082 }, { "epoch": 11.223453370267775, "grad_norm": 0.4644097089767456, "learning_rate": 1.824e-05, "loss": 0.0212, "step": 6083 }, { "epoch": 11.225300092336104, "grad_norm": 0.6743317246437073, "learning_rate": 1.8243e-05, "loss": 0.0294, "step": 6084 }, { "epoch": 11.227146814404431, "grad_norm": 0.3275095224380493, "learning_rate": 1.8245999999999998e-05, "loss": 0.027, "step": 6085 }, { "epoch": 11.22899353647276, "grad_norm": 0.5828282833099365, "learning_rate": 1.8248999999999998e-05, "loss": 0.027, "step": 6086 }, { 
"epoch": 11.23084025854109, "grad_norm": 0.4786163866519928, "learning_rate": 1.8252e-05, "loss": 0.0278, "step": 6087 }, { "epoch": 11.232686980609419, "grad_norm": 0.35602590441703796, "learning_rate": 1.8255e-05, "loss": 0.0196, "step": 6088 }, { "epoch": 11.234533702677748, "grad_norm": 0.9777085185050964, "learning_rate": 1.8258e-05, "loss": 0.0278, "step": 6089 }, { "epoch": 11.236380424746075, "grad_norm": 0.6795394420623779, "learning_rate": 1.8261e-05, "loss": 0.028, "step": 6090 }, { "epoch": 11.238227146814404, "grad_norm": 0.5188271403312683, "learning_rate": 1.8264e-05, "loss": 0.0392, "step": 6091 }, { "epoch": 11.240073868882734, "grad_norm": 0.4623538851737976, "learning_rate": 1.8267e-05, "loss": 0.0272, "step": 6092 }, { "epoch": 11.241920590951063, "grad_norm": 0.7097045183181763, "learning_rate": 1.827e-05, "loss": 0.0191, "step": 6093 }, { "epoch": 11.24376731301939, "grad_norm": 0.4057754874229431, "learning_rate": 1.8273e-05, "loss": 0.0201, "step": 6094 }, { "epoch": 11.24561403508772, "grad_norm": 0.5271565914154053, "learning_rate": 1.8276e-05, "loss": 0.019, "step": 6095 }, { "epoch": 11.247460757156048, "grad_norm": 0.84198397397995, "learning_rate": 1.8279e-05, "loss": 0.0287, "step": 6096 }, { "epoch": 11.249307479224377, "grad_norm": 0.3388877213001251, "learning_rate": 1.8282000000000002e-05, "loss": 0.0182, "step": 6097 }, { "epoch": 11.251154201292705, "grad_norm": 0.4032248854637146, "learning_rate": 1.8285000000000002e-05, "loss": 0.0219, "step": 6098 }, { "epoch": 11.253000923361034, "grad_norm": 0.6903819441795349, "learning_rate": 1.8288000000000002e-05, "loss": 0.0176, "step": 6099 }, { "epoch": 11.254847645429363, "grad_norm": 0.46638402342796326, "learning_rate": 1.8291e-05, "loss": 0.0293, "step": 6100 }, { "epoch": 11.256694367497692, "grad_norm": 0.4857083559036255, "learning_rate": 1.8294e-05, "loss": 0.0208, "step": 6101 }, { "epoch": 11.25854108956602, "grad_norm": 0.5676124095916748, "learning_rate": 1.8297e-05, 
"loss": 0.0231, "step": 6102 }, { "epoch": 11.260387811634349, "grad_norm": 0.3779642581939697, "learning_rate": 1.83e-05, "loss": 0.0264, "step": 6103 }, { "epoch": 11.262234533702678, "grad_norm": 0.30721354484558105, "learning_rate": 1.8303e-05, "loss": 0.0181, "step": 6104 }, { "epoch": 11.264081255771007, "grad_norm": 0.6499035358428955, "learning_rate": 1.8306e-05, "loss": 0.0265, "step": 6105 }, { "epoch": 11.265927977839334, "grad_norm": 0.5521036386489868, "learning_rate": 1.8309e-05, "loss": 0.0199, "step": 6106 }, { "epoch": 11.267774699907664, "grad_norm": 0.7346552610397339, "learning_rate": 1.8312000000000004e-05, "loss": 0.0283, "step": 6107 }, { "epoch": 11.269621421975993, "grad_norm": 0.5562924742698669, "learning_rate": 1.8315000000000003e-05, "loss": 0.0279, "step": 6108 }, { "epoch": 11.271468144044322, "grad_norm": 0.6505104899406433, "learning_rate": 1.8318e-05, "loss": 0.0257, "step": 6109 }, { "epoch": 11.27331486611265, "grad_norm": 0.8869682550430298, "learning_rate": 1.8321e-05, "loss": 0.0219, "step": 6110 }, { "epoch": 11.275161588180978, "grad_norm": 0.5087664723396301, "learning_rate": 1.8324e-05, "loss": 0.0255, "step": 6111 }, { "epoch": 11.277008310249307, "grad_norm": 0.5345026254653931, "learning_rate": 1.8327e-05, "loss": 0.0456, "step": 6112 }, { "epoch": 11.278855032317637, "grad_norm": 0.959027886390686, "learning_rate": 1.833e-05, "loss": 0.2465, "step": 6113 }, { "epoch": 11.280701754385966, "grad_norm": 1.0771859884262085, "learning_rate": 1.8333e-05, "loss": 0.2627, "step": 6114 }, { "epoch": 11.282548476454293, "grad_norm": 0.6319298148155212, "learning_rate": 1.8336e-05, "loss": 0.1879, "step": 6115 }, { "epoch": 11.284395198522622, "grad_norm": 0.6334912180900574, "learning_rate": 1.8339e-05, "loss": 0.1726, "step": 6116 }, { "epoch": 11.286241920590951, "grad_norm": 0.6496388912200928, "learning_rate": 1.8342e-05, "loss": 0.0909, "step": 6117 }, { "epoch": 11.28808864265928, "grad_norm": 1.0599894523620605, 
"learning_rate": 1.8345e-05, "loss": 0.1608, "step": 6118 }, { "epoch": 11.289935364727608, "grad_norm": 0.7013213634490967, "learning_rate": 1.8348e-05, "loss": 0.1448, "step": 6119 }, { "epoch": 11.291782086795937, "grad_norm": 0.5151294469833374, "learning_rate": 1.8351e-05, "loss": 0.1267, "step": 6120 }, { "epoch": 11.293628808864266, "grad_norm": 0.6530570983886719, "learning_rate": 1.8354e-05, "loss": 0.108, "step": 6121 }, { "epoch": 11.295475530932595, "grad_norm": 0.672336757183075, "learning_rate": 1.8357e-05, "loss": 0.1433, "step": 6122 }, { "epoch": 11.297322253000923, "grad_norm": 0.5117024779319763, "learning_rate": 1.836e-05, "loss": 0.0575, "step": 6123 }, { "epoch": 11.299168975069252, "grad_norm": 0.4188872277736664, "learning_rate": 1.8363e-05, "loss": 0.0657, "step": 6124 }, { "epoch": 11.301015697137581, "grad_norm": 0.5835525393486023, "learning_rate": 1.8366e-05, "loss": 0.0622, "step": 6125 }, { "epoch": 11.30286241920591, "grad_norm": 0.9023607969284058, "learning_rate": 1.8369e-05, "loss": 0.0952, "step": 6126 }, { "epoch": 11.304709141274238, "grad_norm": 0.35532012581825256, "learning_rate": 1.8372000000000003e-05, "loss": 0.025, "step": 6127 }, { "epoch": 11.306555863342567, "grad_norm": 0.37712958455085754, "learning_rate": 1.8375000000000003e-05, "loss": 0.025, "step": 6128 }, { "epoch": 11.308402585410896, "grad_norm": 0.4042429029941559, "learning_rate": 1.8378000000000003e-05, "loss": 0.026, "step": 6129 }, { "epoch": 11.310249307479225, "grad_norm": 0.461452454328537, "learning_rate": 1.8381000000000002e-05, "loss": 0.029, "step": 6130 }, { "epoch": 11.312096029547552, "grad_norm": 0.32719576358795166, "learning_rate": 1.8384000000000002e-05, "loss": 0.0254, "step": 6131 }, { "epoch": 11.313942751615881, "grad_norm": 0.559114396572113, "learning_rate": 1.8387000000000002e-05, "loss": 0.0451, "step": 6132 }, { "epoch": 11.31578947368421, "grad_norm": 0.38609135150909424, "learning_rate": 1.8390000000000002e-05, "loss": 0.0148, 
"step": 6133 }, { "epoch": 11.31763619575254, "grad_norm": 0.3838483393192291, "learning_rate": 1.8392999999999998e-05, "loss": 0.0324, "step": 6134 }, { "epoch": 11.319482917820867, "grad_norm": 0.6396909952163696, "learning_rate": 1.8395999999999998e-05, "loss": 0.0264, "step": 6135 }, { "epoch": 11.321329639889196, "grad_norm": 0.7127343416213989, "learning_rate": 1.8398999999999998e-05, "loss": 0.0166, "step": 6136 }, { "epoch": 11.323176361957525, "grad_norm": 0.8143025636672974, "learning_rate": 1.8401999999999998e-05, "loss": 0.0261, "step": 6137 }, { "epoch": 11.325023084025855, "grad_norm": 1.8625601530075073, "learning_rate": 1.8405e-05, "loss": 0.0181, "step": 6138 }, { "epoch": 11.326869806094184, "grad_norm": 0.5424591302871704, "learning_rate": 1.8408e-05, "loss": 0.0152, "step": 6139 }, { "epoch": 11.328716528162511, "grad_norm": 0.31574827432632446, "learning_rate": 1.8411e-05, "loss": 0.01, "step": 6140 }, { "epoch": 11.33056325023084, "grad_norm": 0.47474777698516846, "learning_rate": 1.8414e-05, "loss": 0.0234, "step": 6141 }, { "epoch": 11.33240997229917, "grad_norm": 0.48354342579841614, "learning_rate": 1.8417e-05, "loss": 0.0156, "step": 6142 }, { "epoch": 11.334256694367498, "grad_norm": 0.9422298669815063, "learning_rate": 1.842e-05, "loss": 0.0511, "step": 6143 }, { "epoch": 11.336103416435826, "grad_norm": 0.9375277161598206, "learning_rate": 1.8423e-05, "loss": 0.0296, "step": 6144 }, { "epoch": 11.337950138504155, "grad_norm": 0.85575270652771, "learning_rate": 1.8426e-05, "loss": 0.0257, "step": 6145 }, { "epoch": 11.339796860572484, "grad_norm": 0.4999704957008362, "learning_rate": 1.8429e-05, "loss": 0.0224, "step": 6146 }, { "epoch": 11.341643582640813, "grad_norm": 0.7743487358093262, "learning_rate": 1.8432e-05, "loss": 0.0349, "step": 6147 }, { "epoch": 11.34349030470914, "grad_norm": 0.547167956829071, "learning_rate": 1.8435000000000002e-05, "loss": 0.0262, "step": 6148 }, { "epoch": 11.34533702677747, "grad_norm": 
0.46982502937316895, "learning_rate": 1.8438000000000002e-05, "loss": 0.0505, "step": 6149 }, { "epoch": 11.347183748845799, "grad_norm": 0.4561302959918976, "learning_rate": 1.8441000000000002e-05, "loss": 0.019, "step": 6150 }, { "epoch": 11.349030470914128, "grad_norm": 0.556189239025116, "learning_rate": 1.8444e-05, "loss": 0.0222, "step": 6151 }, { "epoch": 11.350877192982455, "grad_norm": 2.062488555908203, "learning_rate": 1.8447e-05, "loss": 0.0387, "step": 6152 }, { "epoch": 11.352723915050785, "grad_norm": 0.48797592520713806, "learning_rate": 1.845e-05, "loss": 0.0207, "step": 6153 }, { "epoch": 11.354570637119114, "grad_norm": 0.6511245965957642, "learning_rate": 1.8453e-05, "loss": 0.0292, "step": 6154 }, { "epoch": 11.356417359187443, "grad_norm": 0.4582718014717102, "learning_rate": 1.8456e-05, "loss": 0.0186, "step": 6155 }, { "epoch": 11.35826408125577, "grad_norm": 0.567861020565033, "learning_rate": 1.8459e-05, "loss": 0.0283, "step": 6156 }, { "epoch": 11.3601108033241, "grad_norm": 0.6611009240150452, "learning_rate": 1.8462e-05, "loss": 0.0327, "step": 6157 }, { "epoch": 11.361957525392429, "grad_norm": 0.4739968776702881, "learning_rate": 1.8465e-05, "loss": 0.024, "step": 6158 }, { "epoch": 11.363804247460758, "grad_norm": 0.5592001676559448, "learning_rate": 1.8468e-05, "loss": 0.0183, "step": 6159 }, { "epoch": 11.365650969529085, "grad_norm": 0.6979249119758606, "learning_rate": 1.8471e-05, "loss": 0.0497, "step": 6160 }, { "epoch": 11.367497691597414, "grad_norm": 0.5059146881103516, "learning_rate": 1.8474e-05, "loss": 0.0225, "step": 6161 }, { "epoch": 11.369344413665743, "grad_norm": 0.6794499754905701, "learning_rate": 1.8477e-05, "loss": 0.0312, "step": 6162 }, { "epoch": 11.371191135734072, "grad_norm": 1.0225704908370972, "learning_rate": 1.848e-05, "loss": 0.2726, "step": 6163 }, { "epoch": 11.373037857802402, "grad_norm": 0.9803306460380554, "learning_rate": 1.8483e-05, "loss": 0.2974, "step": 6164 }, { "epoch": 
11.374884579870729, "grad_norm": 0.6645657420158386, "learning_rate": 1.8486e-05, "loss": 0.1743, "step": 6165 }, { "epoch": 11.376731301939058, "grad_norm": 0.9934237003326416, "learning_rate": 1.8489e-05, "loss": 0.1558, "step": 6166 }, { "epoch": 11.378578024007387, "grad_norm": 0.7072994709014893, "learning_rate": 1.8492e-05, "loss": 0.1928, "step": 6167 }, { "epoch": 11.380424746075716, "grad_norm": 0.7351928353309631, "learning_rate": 1.8495e-05, "loss": 0.1044, "step": 6168 }, { "epoch": 11.382271468144044, "grad_norm": 0.6384725570678711, "learning_rate": 1.8498e-05, "loss": 0.1161, "step": 6169 }, { "epoch": 11.384118190212373, "grad_norm": 0.5761701464653015, "learning_rate": 1.8501e-05, "loss": 0.0978, "step": 6170 }, { "epoch": 11.385964912280702, "grad_norm": 0.7269904613494873, "learning_rate": 1.8504e-05, "loss": 0.1354, "step": 6171 }, { "epoch": 11.387811634349031, "grad_norm": 0.6286356449127197, "learning_rate": 1.8507e-05, "loss": 0.0969, "step": 6172 }, { "epoch": 11.389658356417359, "grad_norm": 0.6741904616355896, "learning_rate": 1.851e-05, "loss": 0.1379, "step": 6173 }, { "epoch": 11.391505078485688, "grad_norm": 0.5379699468612671, "learning_rate": 1.8513e-05, "loss": 0.0718, "step": 6174 }, { "epoch": 11.393351800554017, "grad_norm": 0.6479658484458923, "learning_rate": 1.8516e-05, "loss": 0.1091, "step": 6175 }, { "epoch": 11.395198522622346, "grad_norm": 1.7053743600845337, "learning_rate": 1.8519e-05, "loss": 0.1225, "step": 6176 }, { "epoch": 11.397045244690673, "grad_norm": 0.46991604566574097, "learning_rate": 1.8522e-05, "loss": 0.0673, "step": 6177 }, { "epoch": 11.398891966759003, "grad_norm": 0.8959211707115173, "learning_rate": 1.8525000000000003e-05, "loss": 0.0555, "step": 6178 }, { "epoch": 11.400738688827332, "grad_norm": 0.554658830165863, "learning_rate": 1.8528000000000003e-05, "loss": 0.0563, "step": 6179 }, { "epoch": 11.40258541089566, "grad_norm": 0.3450296223163605, "learning_rate": 1.8531000000000003e-05, "loss": 
0.0186, "step": 6180 }, { "epoch": 11.404432132963988, "grad_norm": 0.7503721714019775, "learning_rate": 1.8534000000000002e-05, "loss": 0.0257, "step": 6181 }, { "epoch": 11.406278855032317, "grad_norm": 0.41149789094924927, "learning_rate": 1.8537000000000002e-05, "loss": 0.0299, "step": 6182 }, { "epoch": 11.408125577100646, "grad_norm": 0.8231745362281799, "learning_rate": 1.854e-05, "loss": 0.0378, "step": 6183 }, { "epoch": 11.409972299168976, "grad_norm": 0.61480313539505, "learning_rate": 1.8543e-05, "loss": 0.0604, "step": 6184 }, { "epoch": 11.411819021237303, "grad_norm": 0.42844507098197937, "learning_rate": 1.8545999999999998e-05, "loss": 0.0346, "step": 6185 }, { "epoch": 11.413665743305632, "grad_norm": 0.4954689145088196, "learning_rate": 1.8548999999999998e-05, "loss": 0.0253, "step": 6186 }, { "epoch": 11.415512465373961, "grad_norm": 0.35354283452033997, "learning_rate": 1.8551999999999998e-05, "loss": 0.0176, "step": 6187 }, { "epoch": 11.41735918744229, "grad_norm": 0.3825988173484802, "learning_rate": 1.8555e-05, "loss": 0.0297, "step": 6188 }, { "epoch": 11.41920590951062, "grad_norm": 0.43082669377326965, "learning_rate": 1.8558e-05, "loss": 0.0344, "step": 6189 }, { "epoch": 11.421052631578947, "grad_norm": 0.5307301878929138, "learning_rate": 1.8561e-05, "loss": 0.0179, "step": 6190 }, { "epoch": 11.422899353647276, "grad_norm": 0.6526667475700378, "learning_rate": 1.8564e-05, "loss": 0.0481, "step": 6191 }, { "epoch": 11.424746075715605, "grad_norm": 0.42508047819137573, "learning_rate": 1.8567e-05, "loss": 0.0164, "step": 6192 }, { "epoch": 11.426592797783934, "grad_norm": 0.4299699664115906, "learning_rate": 1.857e-05, "loss": 0.023, "step": 6193 }, { "epoch": 11.428439519852262, "grad_norm": 0.43284791707992554, "learning_rate": 1.8573e-05, "loss": 0.0188, "step": 6194 }, { "epoch": 11.43028624192059, "grad_norm": 0.6751835942268372, "learning_rate": 1.8576e-05, "loss": 0.0214, "step": 6195 }, { "epoch": 11.43213296398892, "grad_norm": 
0.4137125015258789, "learning_rate": 1.8579e-05, "loss": 0.0177, "step": 6196 }, { "epoch": 11.43397968605725, "grad_norm": 0.48743513226509094, "learning_rate": 1.8582e-05, "loss": 0.0232, "step": 6197 }, { "epoch": 11.435826408125576, "grad_norm": 0.4469239413738251, "learning_rate": 1.8585000000000002e-05, "loss": 0.0229, "step": 6198 }, { "epoch": 11.437673130193906, "grad_norm": 0.30634063482284546, "learning_rate": 1.8588000000000002e-05, "loss": 0.0131, "step": 6199 }, { "epoch": 11.439519852262235, "grad_norm": 0.774604082107544, "learning_rate": 1.8591000000000002e-05, "loss": 0.0274, "step": 6200 }, { "epoch": 11.441366574330564, "grad_norm": 0.377434641122818, "learning_rate": 1.8594000000000002e-05, "loss": 0.0169, "step": 6201 }, { "epoch": 11.443213296398891, "grad_norm": 0.41127023100852966, "learning_rate": 1.8597e-05, "loss": 0.0165, "step": 6202 }, { "epoch": 11.44506001846722, "grad_norm": 0.40584078431129456, "learning_rate": 1.86e-05, "loss": 0.0197, "step": 6203 }, { "epoch": 11.44690674053555, "grad_norm": 0.5642501711845398, "learning_rate": 1.8603e-05, "loss": 0.0229, "step": 6204 }, { "epoch": 11.448753462603879, "grad_norm": 0.6988409757614136, "learning_rate": 1.8606e-05, "loss": 0.0124, "step": 6205 }, { "epoch": 11.450600184672206, "grad_norm": 0.958296000957489, "learning_rate": 1.8609e-05, "loss": 0.0309, "step": 6206 }, { "epoch": 11.452446906740535, "grad_norm": 0.7275720238685608, "learning_rate": 1.8612e-05, "loss": 0.019, "step": 6207 }, { "epoch": 11.454293628808864, "grad_norm": 0.789011538028717, "learning_rate": 1.8615e-05, "loss": 0.0253, "step": 6208 }, { "epoch": 11.456140350877194, "grad_norm": 0.9203380346298218, "learning_rate": 1.8618e-05, "loss": 0.0239, "step": 6209 }, { "epoch": 11.45798707294552, "grad_norm": 0.726800799369812, "learning_rate": 1.8621e-05, "loss": 0.0213, "step": 6210 }, { "epoch": 11.45983379501385, "grad_norm": 0.308773398399353, "learning_rate": 1.8624e-05, "loss": 0.0125, "step": 6211 }, { 
"epoch": 11.46168051708218, "grad_norm": 0.7262300848960876, "learning_rate": 1.8627e-05, "loss": 0.0334, "step": 6212 }, { "epoch": 11.463527239150508, "grad_norm": 2.23403000831604, "learning_rate": 1.863e-05, "loss": 0.3048, "step": 6213 }, { "epoch": 11.465373961218837, "grad_norm": 1.738344430923462, "learning_rate": 1.8633e-05, "loss": 0.3017, "step": 6214 }, { "epoch": 11.467220683287165, "grad_norm": 0.6851328015327454, "learning_rate": 1.8636e-05, "loss": 0.2065, "step": 6215 }, { "epoch": 11.469067405355494, "grad_norm": 0.9628781080245972, "learning_rate": 1.8639e-05, "loss": 0.2222, "step": 6216 }, { "epoch": 11.470914127423823, "grad_norm": 0.6785116195678711, "learning_rate": 1.8642e-05, "loss": 0.1355, "step": 6217 }, { "epoch": 11.472760849492152, "grad_norm": 0.8106775879859924, "learning_rate": 1.8645000000000002e-05, "loss": 0.1877, "step": 6218 }, { "epoch": 11.47460757156048, "grad_norm": 0.8037397861480713, "learning_rate": 1.8648000000000002e-05, "loss": 0.1401, "step": 6219 }, { "epoch": 11.476454293628809, "grad_norm": 0.49321749806404114, "learning_rate": 1.8651e-05, "loss": 0.1098, "step": 6220 }, { "epoch": 11.478301015697138, "grad_norm": 0.5776592493057251, "learning_rate": 1.8654e-05, "loss": 0.0997, "step": 6221 }, { "epoch": 11.480147737765467, "grad_norm": 0.9313098192214966, "learning_rate": 1.8657e-05, "loss": 0.1167, "step": 6222 }, { "epoch": 11.481994459833794, "grad_norm": 0.700075626373291, "learning_rate": 1.866e-05, "loss": 0.086, "step": 6223 }, { "epoch": 11.483841181902124, "grad_norm": 0.33606475591659546, "learning_rate": 1.8663e-05, "loss": 0.0487, "step": 6224 }, { "epoch": 11.485687903970453, "grad_norm": 0.6985853910446167, "learning_rate": 1.8666e-05, "loss": 0.0404, "step": 6225 }, { "epoch": 11.487534626038782, "grad_norm": 0.6496925354003906, "learning_rate": 1.8669e-05, "loss": 0.0782, "step": 6226 }, { "epoch": 11.48938134810711, "grad_norm": 1.1872271299362183, "learning_rate": 1.8672e-05, "loss": 0.0995, 
"step": 6227 }, { "epoch": 11.491228070175438, "grad_norm": 0.6918002367019653, "learning_rate": 1.8675000000000003e-05, "loss": 0.0593, "step": 6228 }, { "epoch": 11.493074792243767, "grad_norm": 0.3916986584663391, "learning_rate": 1.8678000000000003e-05, "loss": 0.0308, "step": 6229 }, { "epoch": 11.494921514312097, "grad_norm": 0.46228450536727905, "learning_rate": 1.8681000000000003e-05, "loss": 0.0223, "step": 6230 }, { "epoch": 11.496768236380424, "grad_norm": 0.48111462593078613, "learning_rate": 1.8684000000000003e-05, "loss": 0.0312, "step": 6231 }, { "epoch": 11.498614958448753, "grad_norm": 0.38682618737220764, "learning_rate": 1.8687e-05, "loss": 0.0189, "step": 6232 }, { "epoch": 11.500461680517082, "grad_norm": 0.3328859210014343, "learning_rate": 1.869e-05, "loss": 0.0166, "step": 6233 }, { "epoch": 11.502308402585411, "grad_norm": 0.512199878692627, "learning_rate": 1.8693e-05, "loss": 0.0185, "step": 6234 }, { "epoch": 11.504155124653739, "grad_norm": 0.524346113204956, "learning_rate": 1.8696e-05, "loss": 0.0273, "step": 6235 }, { "epoch": 11.506001846722068, "grad_norm": 0.5263787508010864, "learning_rate": 1.8699e-05, "loss": 0.0285, "step": 6236 }, { "epoch": 11.507848568790397, "grad_norm": 0.6654219031333923, "learning_rate": 1.8701999999999998e-05, "loss": 0.0287, "step": 6237 }, { "epoch": 11.509695290858726, "grad_norm": 0.4475488066673279, "learning_rate": 1.8705e-05, "loss": 0.043, "step": 6238 }, { "epoch": 11.511542012927055, "grad_norm": 0.4326750934123993, "learning_rate": 1.8708e-05, "loss": 0.03, "step": 6239 }, { "epoch": 11.513388734995383, "grad_norm": 0.4968683123588562, "learning_rate": 1.8711e-05, "loss": 0.0169, "step": 6240 }, { "epoch": 11.515235457063712, "grad_norm": 0.9535714983940125, "learning_rate": 1.8714e-05, "loss": 0.0247, "step": 6241 }, { "epoch": 11.517082179132041, "grad_norm": 0.7456192970275879, "learning_rate": 1.8717e-05, "loss": 0.0248, "step": 6242 }, { "epoch": 11.51892890120037, "grad_norm": 
0.23908020555973053, "learning_rate": 1.872e-05, "loss": 0.0109, "step": 6243 }, { "epoch": 11.520775623268698, "grad_norm": 0.42601415514945984, "learning_rate": 1.8723e-05, "loss": 0.0504, "step": 6244 }, { "epoch": 11.522622345337027, "grad_norm": 0.7600030899047852, "learning_rate": 1.8726e-05, "loss": 0.0212, "step": 6245 }, { "epoch": 11.524469067405356, "grad_norm": 0.8499584197998047, "learning_rate": 1.8729e-05, "loss": 0.0238, "step": 6246 }, { "epoch": 11.526315789473685, "grad_norm": 0.5940492749214172, "learning_rate": 1.8732e-05, "loss": 0.0173, "step": 6247 }, { "epoch": 11.528162511542012, "grad_norm": 0.3725167512893677, "learning_rate": 1.8735000000000003e-05, "loss": 0.02, "step": 6248 }, { "epoch": 11.530009233610341, "grad_norm": 0.6236770749092102, "learning_rate": 1.8738000000000003e-05, "loss": 0.0195, "step": 6249 }, { "epoch": 11.53185595567867, "grad_norm": 0.4146452248096466, "learning_rate": 1.8741000000000002e-05, "loss": 0.0167, "step": 6250 }, { "epoch": 11.533702677747, "grad_norm": 0.3959144353866577, "learning_rate": 1.8744000000000002e-05, "loss": 0.0193, "step": 6251 }, { "epoch": 11.535549399815327, "grad_norm": 0.6457730531692505, "learning_rate": 1.8747000000000002e-05, "loss": 0.0273, "step": 6252 }, { "epoch": 11.537396121883656, "grad_norm": 0.6635228991508484, "learning_rate": 1.8750000000000002e-05, "loss": 0.0299, "step": 6253 }, { "epoch": 11.539242843951985, "grad_norm": 0.41056013107299805, "learning_rate": 1.8753e-05, "loss": 0.0349, "step": 6254 }, { "epoch": 11.541089566020315, "grad_norm": 0.3606818616390228, "learning_rate": 1.8756e-05, "loss": 0.0117, "step": 6255 }, { "epoch": 11.542936288088642, "grad_norm": 1.001387357711792, "learning_rate": 1.8759e-05, "loss": 0.0178, "step": 6256 }, { "epoch": 11.544783010156971, "grad_norm": 0.4944041967391968, "learning_rate": 1.8761999999999998e-05, "loss": 0.0241, "step": 6257 }, { "epoch": 11.5466297322253, "grad_norm": 0.4109015166759491, "learning_rate": 
1.8764999999999997e-05, "loss": 0.0159, "step": 6258 }, { "epoch": 11.54847645429363, "grad_norm": 0.4980219006538391, "learning_rate": 1.8768e-05, "loss": 0.0221, "step": 6259 }, { "epoch": 11.550323176361957, "grad_norm": 0.5004519820213318, "learning_rate": 1.8771e-05, "loss": 0.025, "step": 6260 }, { "epoch": 11.552169898430286, "grad_norm": 0.7837686538696289, "learning_rate": 1.8774e-05, "loss": 0.0397, "step": 6261 }, { "epoch": 11.554016620498615, "grad_norm": 1.5615583658218384, "learning_rate": 1.8777e-05, "loss": 0.0869, "step": 6262 }, { "epoch": 11.555863342566944, "grad_norm": 0.7100450992584229, "learning_rate": 1.878e-05, "loss": 0.2417, "step": 6263 }, { "epoch": 11.557710064635273, "grad_norm": 0.5554599761962891, "learning_rate": 1.8783e-05, "loss": 0.1802, "step": 6264 }, { "epoch": 11.5595567867036, "grad_norm": 0.6480225324630737, "learning_rate": 1.8786e-05, "loss": 0.1449, "step": 6265 }, { "epoch": 11.56140350877193, "grad_norm": 0.6713318228721619, "learning_rate": 1.8789e-05, "loss": 0.1696, "step": 6266 }, { "epoch": 11.563250230840259, "grad_norm": 0.7170022130012512, "learning_rate": 1.8792e-05, "loss": 0.1916, "step": 6267 }, { "epoch": 11.565096952908588, "grad_norm": 0.5778694748878479, "learning_rate": 1.8795e-05, "loss": 0.126, "step": 6268 }, { "epoch": 11.566943674976915, "grad_norm": 0.5431328415870667, "learning_rate": 1.8798000000000002e-05, "loss": 0.108, "step": 6269 }, { "epoch": 11.568790397045245, "grad_norm": 1.2238951921463013, "learning_rate": 1.8801000000000002e-05, "loss": 0.1114, "step": 6270 }, { "epoch": 11.570637119113574, "grad_norm": 0.8291918039321899, "learning_rate": 1.8804e-05, "loss": 0.0839, "step": 6271 }, { "epoch": 11.572483841181903, "grad_norm": 0.6827449202537537, "learning_rate": 1.8807e-05, "loss": 0.1024, "step": 6272 }, { "epoch": 11.57433056325023, "grad_norm": 0.4937664568424225, "learning_rate": 1.881e-05, "loss": 0.0618, "step": 6273 }, { "epoch": 11.57617728531856, "grad_norm": 
0.4901401400566101, "learning_rate": 1.8813e-05, "loss": 0.0651, "step": 6274 }, { "epoch": 11.578024007386889, "grad_norm": 0.8407455086708069, "learning_rate": 1.8816e-05, "loss": 0.1219, "step": 6275 }, { "epoch": 11.579870729455218, "grad_norm": 0.5447347164154053, "learning_rate": 1.8819e-05, "loss": 0.0682, "step": 6276 }, { "epoch": 11.581717451523545, "grad_norm": 0.3770175278186798, "learning_rate": 1.8822e-05, "loss": 0.0362, "step": 6277 }, { "epoch": 11.583564173591874, "grad_norm": 0.48820844292640686, "learning_rate": 1.8825e-05, "loss": 0.0351, "step": 6278 }, { "epoch": 11.585410895660203, "grad_norm": 0.5128350257873535, "learning_rate": 1.8828000000000003e-05, "loss": 0.0315, "step": 6279 }, { "epoch": 11.587257617728532, "grad_norm": 0.6119294762611389, "learning_rate": 1.8831000000000003e-05, "loss": 0.0252, "step": 6280 }, { "epoch": 11.58910433979686, "grad_norm": 0.5184559226036072, "learning_rate": 1.8834e-05, "loss": 0.0221, "step": 6281 }, { "epoch": 11.590951061865189, "grad_norm": 0.6163107752799988, "learning_rate": 1.8837e-05, "loss": 0.0258, "step": 6282 }, { "epoch": 11.592797783933518, "grad_norm": 0.5317324995994568, "learning_rate": 1.884e-05, "loss": 0.033, "step": 6283 }, { "epoch": 11.594644506001847, "grad_norm": 0.4561191201210022, "learning_rate": 1.8843e-05, "loss": 0.0296, "step": 6284 }, { "epoch": 11.596491228070175, "grad_norm": 0.3529854714870453, "learning_rate": 1.8846e-05, "loss": 0.0212, "step": 6285 }, { "epoch": 11.598337950138504, "grad_norm": 0.39690741896629333, "learning_rate": 1.8849e-05, "loss": 0.016, "step": 6286 }, { "epoch": 11.600184672206833, "grad_norm": 0.6497731804847717, "learning_rate": 1.8852e-05, "loss": 0.0233, "step": 6287 }, { "epoch": 11.602031394275162, "grad_norm": 0.5435981750488281, "learning_rate": 1.8854999999999998e-05, "loss": 0.0331, "step": 6288 }, { "epoch": 11.603878116343491, "grad_norm": 0.724307119846344, "learning_rate": 1.8858e-05, "loss": 0.029, "step": 6289 }, { "epoch": 
11.605724838411819, "grad_norm": 0.4700302183628082, "learning_rate": 1.8861e-05, "loss": 0.0328, "step": 6290 }, { "epoch": 11.607571560480148, "grad_norm": 0.39719411730766296, "learning_rate": 1.8864e-05, "loss": 0.0263, "step": 6291 }, { "epoch": 11.609418282548477, "grad_norm": 0.38685211539268494, "learning_rate": 1.8867e-05, "loss": 0.0182, "step": 6292 }, { "epoch": 11.611265004616806, "grad_norm": 0.3751578629016876, "learning_rate": 1.887e-05, "loss": 0.0185, "step": 6293 }, { "epoch": 11.613111726685133, "grad_norm": 0.38899144530296326, "learning_rate": 1.8873e-05, "loss": 0.0106, "step": 6294 }, { "epoch": 11.614958448753463, "grad_norm": 0.6312898993492126, "learning_rate": 1.8876e-05, "loss": 0.029, "step": 6295 }, { "epoch": 11.616805170821792, "grad_norm": 1.0704835653305054, "learning_rate": 1.8879e-05, "loss": 0.0254, "step": 6296 }, { "epoch": 11.61865189289012, "grad_norm": 0.9887194633483887, "learning_rate": 1.8882e-05, "loss": 0.0248, "step": 6297 }, { "epoch": 11.620498614958448, "grad_norm": 0.5434224009513855, "learning_rate": 1.8885e-05, "loss": 0.0297, "step": 6298 }, { "epoch": 11.622345337026777, "grad_norm": 0.506160318851471, "learning_rate": 1.8888000000000003e-05, "loss": 0.0176, "step": 6299 }, { "epoch": 11.624192059095106, "grad_norm": 1.1333355903625488, "learning_rate": 1.8891000000000003e-05, "loss": 0.045, "step": 6300 }, { "epoch": 11.626038781163436, "grad_norm": 0.3217025399208069, "learning_rate": 1.8894000000000002e-05, "loss": 0.0174, "step": 6301 }, { "epoch": 11.627885503231763, "grad_norm": 0.4421088397502899, "learning_rate": 1.8897000000000002e-05, "loss": 0.0144, "step": 6302 }, { "epoch": 11.629732225300092, "grad_norm": 0.5572022199630737, "learning_rate": 1.8900000000000002e-05, "loss": 0.0194, "step": 6303 }, { "epoch": 11.631578947368421, "grad_norm": 0.292294979095459, "learning_rate": 1.8903000000000002e-05, "loss": 0.0143, "step": 6304 }, { "epoch": 11.63342566943675, "grad_norm": 1.2303966283798218, 
"learning_rate": 1.8906e-05, "loss": 0.0317, "step": 6305 }, { "epoch": 11.635272391505078, "grad_norm": 0.5086663365364075, "learning_rate": 1.8908999999999998e-05, "loss": 0.022, "step": 6306 }, { "epoch": 11.637119113573407, "grad_norm": 0.9124029874801636, "learning_rate": 1.8911999999999998e-05, "loss": 0.0346, "step": 6307 }, { "epoch": 11.638965835641736, "grad_norm": 0.47102823853492737, "learning_rate": 1.8914999999999998e-05, "loss": 0.0181, "step": 6308 }, { "epoch": 11.640812557710065, "grad_norm": 0.624832808971405, "learning_rate": 1.8918e-05, "loss": 0.0241, "step": 6309 }, { "epoch": 11.642659279778393, "grad_norm": 0.7668011784553528, "learning_rate": 1.8921e-05, "loss": 0.0271, "step": 6310 }, { "epoch": 11.644506001846722, "grad_norm": 0.4850684404373169, "learning_rate": 1.8924e-05, "loss": 0.0169, "step": 6311 }, { "epoch": 11.64635272391505, "grad_norm": 0.40155303478240967, "learning_rate": 1.8927e-05, "loss": 0.0182, "step": 6312 }, { "epoch": 11.64819944598338, "grad_norm": 0.7572173476219177, "learning_rate": 1.893e-05, "loss": 0.2137, "step": 6313 }, { "epoch": 11.65004616805171, "grad_norm": 0.5553044080734253, "learning_rate": 1.8933e-05, "loss": 0.1823, "step": 6314 }, { "epoch": 11.651892890120036, "grad_norm": 0.7447746396064758, "learning_rate": 1.8936e-05, "loss": 0.1886, "step": 6315 }, { "epoch": 11.653739612188366, "grad_norm": 0.6007423400878906, "learning_rate": 1.8939e-05, "loss": 0.1405, "step": 6316 }, { "epoch": 11.655586334256695, "grad_norm": 0.6235414147377014, "learning_rate": 1.8942e-05, "loss": 0.1075, "step": 6317 }, { "epoch": 11.657433056325024, "grad_norm": 0.6585131287574768, "learning_rate": 1.8945e-05, "loss": 0.1024, "step": 6318 }, { "epoch": 11.659279778393351, "grad_norm": 0.7461692690849304, "learning_rate": 1.8948000000000002e-05, "loss": 0.1454, "step": 6319 }, { "epoch": 11.66112650046168, "grad_norm": 0.5178083181381226, "learning_rate": 1.8951000000000002e-05, "loss": 0.0975, "step": 6320 }, { 
"epoch": 11.66297322253001, "grad_norm": 0.7293455004692078, "learning_rate": 1.8954000000000002e-05, "loss": 0.1067, "step": 6321 }, { "epoch": 11.664819944598339, "grad_norm": 0.44974884390830994, "learning_rate": 1.8957e-05, "loss": 0.0661, "step": 6322 }, { "epoch": 11.666666666666666, "grad_norm": 0.45195817947387695, "learning_rate": 1.896e-05, "loss": 0.0724, "step": 6323 }, { "epoch": 11.668513388734995, "grad_norm": 0.5777408480644226, "learning_rate": 1.8963e-05, "loss": 0.0848, "step": 6324 }, { "epoch": 11.670360110803324, "grad_norm": 0.6494334936141968, "learning_rate": 1.8966e-05, "loss": 0.0469, "step": 6325 }, { "epoch": 11.672206832871654, "grad_norm": 0.5034980773925781, "learning_rate": 1.8969e-05, "loss": 0.0652, "step": 6326 }, { "epoch": 11.67405355493998, "grad_norm": 0.37976697087287903, "learning_rate": 1.8972e-05, "loss": 0.0352, "step": 6327 }, { "epoch": 11.67590027700831, "grad_norm": 0.932072639465332, "learning_rate": 1.8975e-05, "loss": 0.0583, "step": 6328 }, { "epoch": 11.67774699907664, "grad_norm": 0.4678153395652771, "learning_rate": 1.8978000000000004e-05, "loss": 0.0266, "step": 6329 }, { "epoch": 11.679593721144968, "grad_norm": 0.45993855595588684, "learning_rate": 1.8981e-05, "loss": 0.0398, "step": 6330 }, { "epoch": 11.681440443213296, "grad_norm": 0.587837815284729, "learning_rate": 1.8984e-05, "loss": 0.053, "step": 6331 }, { "epoch": 11.683287165281625, "grad_norm": 0.5369887948036194, "learning_rate": 1.8987e-05, "loss": 0.0946, "step": 6332 }, { "epoch": 11.685133887349954, "grad_norm": 0.803807258605957, "learning_rate": 1.899e-05, "loss": 0.0356, "step": 6333 }, { "epoch": 11.686980609418283, "grad_norm": 0.4701552391052246, "learning_rate": 1.8993e-05, "loss": 0.0325, "step": 6334 }, { "epoch": 11.68882733148661, "grad_norm": 0.6736271381378174, "learning_rate": 1.8996e-05, "loss": 0.0449, "step": 6335 }, { "epoch": 11.69067405355494, "grad_norm": 0.35552453994750977, "learning_rate": 1.8999e-05, "loss": 0.0203, 
"step": 6336 }, { "epoch": 11.692520775623269, "grad_norm": 0.3159559965133667, "learning_rate": 1.9002e-05, "loss": 0.0209, "step": 6337 }, { "epoch": 11.694367497691598, "grad_norm": 0.38443759083747864, "learning_rate": 1.9005e-05, "loss": 0.0223, "step": 6338 }, { "epoch": 11.696214219759927, "grad_norm": 0.20609219372272491, "learning_rate": 1.9008e-05, "loss": 0.0133, "step": 6339 }, { "epoch": 11.698060941828254, "grad_norm": 0.5753526091575623, "learning_rate": 1.9011e-05, "loss": 0.0636, "step": 6340 }, { "epoch": 11.699907663896584, "grad_norm": 0.38154879212379456, "learning_rate": 1.9014e-05, "loss": 0.0193, "step": 6341 }, { "epoch": 11.701754385964913, "grad_norm": 0.485611230134964, "learning_rate": 1.9017e-05, "loss": 0.0177, "step": 6342 }, { "epoch": 11.703601108033242, "grad_norm": 0.3644583523273468, "learning_rate": 1.902e-05, "loss": 0.0158, "step": 6343 }, { "epoch": 11.70544783010157, "grad_norm": 0.33003613352775574, "learning_rate": 1.9023e-05, "loss": 0.0141, "step": 6344 }, { "epoch": 11.707294552169898, "grad_norm": 0.33550500869750977, "learning_rate": 1.9026e-05, "loss": 0.0136, "step": 6345 }, { "epoch": 11.709141274238227, "grad_norm": 0.3046441376209259, "learning_rate": 1.9029e-05, "loss": 0.0159, "step": 6346 }, { "epoch": 11.710987996306557, "grad_norm": 0.6863639950752258, "learning_rate": 1.9032e-05, "loss": 0.0201, "step": 6347 }, { "epoch": 11.712834718374884, "grad_norm": 0.7514137029647827, "learning_rate": 1.9035e-05, "loss": 0.0305, "step": 6348 }, { "epoch": 11.714681440443213, "grad_norm": 0.5391896963119507, "learning_rate": 1.9038000000000003e-05, "loss": 0.0214, "step": 6349 }, { "epoch": 11.716528162511542, "grad_norm": 0.5416777729988098, "learning_rate": 1.9041000000000003e-05, "loss": 0.0147, "step": 6350 }, { "epoch": 11.718374884579871, "grad_norm": 0.7514315843582153, "learning_rate": 1.9044000000000003e-05, "loss": 0.019, "step": 6351 }, { "epoch": 11.720221606648199, "grad_norm": 0.8173295259475708, 
"learning_rate": 1.9047000000000002e-05, "loss": 0.0279, "step": 6352 }, { "epoch": 11.722068328716528, "grad_norm": 0.7046154141426086, "learning_rate": 1.9050000000000002e-05, "loss": 0.0148, "step": 6353 }, { "epoch": 11.723915050784857, "grad_norm": 0.33618953824043274, "learning_rate": 1.9053000000000002e-05, "loss": 0.0141, "step": 6354 }, { "epoch": 11.725761772853186, "grad_norm": 0.4373946189880371, "learning_rate": 1.9056e-05, "loss": 0.0264, "step": 6355 }, { "epoch": 11.727608494921514, "grad_norm": 0.7646742463111877, "learning_rate": 1.9058999999999998e-05, "loss": 0.0249, "step": 6356 }, { "epoch": 11.729455216989843, "grad_norm": 0.9046465158462524, "learning_rate": 1.9061999999999998e-05, "loss": 0.0266, "step": 6357 }, { "epoch": 11.731301939058172, "grad_norm": 0.4174250662326813, "learning_rate": 1.9064999999999998e-05, "loss": 0.0228, "step": 6358 }, { "epoch": 11.733148661126501, "grad_norm": 0.756666898727417, "learning_rate": 1.9068e-05, "loss": 0.0473, "step": 6359 }, { "epoch": 11.734995383194828, "grad_norm": 0.5958936214447021, "learning_rate": 1.9071e-05, "loss": 0.0234, "step": 6360 }, { "epoch": 11.736842105263158, "grad_norm": 0.7871098518371582, "learning_rate": 1.9074e-05, "loss": 0.0198, "step": 6361 }, { "epoch": 11.738688827331487, "grad_norm": 1.7256369590759277, "learning_rate": 1.9077e-05, "loss": 0.0317, "step": 6362 }, { "epoch": 11.740535549399816, "grad_norm": 0.892645537853241, "learning_rate": 1.908e-05, "loss": 0.2726, "step": 6363 }, { "epoch": 11.742382271468145, "grad_norm": 0.7114629745483398, "learning_rate": 1.9083e-05, "loss": 0.2411, "step": 6364 }, { "epoch": 11.744228993536472, "grad_norm": 0.6901366710662842, "learning_rate": 1.9086e-05, "loss": 0.1997, "step": 6365 }, { "epoch": 11.746075715604801, "grad_norm": 0.7185481786727905, "learning_rate": 1.9089e-05, "loss": 0.164, "step": 6366 }, { "epoch": 11.74792243767313, "grad_norm": 0.676231324672699, "learning_rate": 1.9092e-05, "loss": 0.1472, "step": 6367 
}, { "epoch": 11.749769159741458, "grad_norm": 0.5787920355796814, "learning_rate": 1.9095e-05, "loss": 0.1133, "step": 6368 }, { "epoch": 11.751615881809787, "grad_norm": 0.5355517268180847, "learning_rate": 1.9098000000000002e-05, "loss": 0.117, "step": 6369 }, { "epoch": 11.753462603878116, "grad_norm": 0.7404063940048218, "learning_rate": 1.9101000000000002e-05, "loss": 0.1369, "step": 6370 }, { "epoch": 11.755309325946445, "grad_norm": 0.5565574765205383, "learning_rate": 1.9104000000000002e-05, "loss": 0.0798, "step": 6371 }, { "epoch": 11.757156048014775, "grad_norm": 0.5662729144096375, "learning_rate": 1.9107000000000002e-05, "loss": 0.086, "step": 6372 }, { "epoch": 11.759002770083102, "grad_norm": 0.5577512383460999, "learning_rate": 1.911e-05, "loss": 0.0923, "step": 6373 }, { "epoch": 11.760849492151431, "grad_norm": 0.5840328931808472, "learning_rate": 1.9113e-05, "loss": 0.0757, "step": 6374 }, { "epoch": 11.76269621421976, "grad_norm": 0.564666748046875, "learning_rate": 1.9116e-05, "loss": 0.0569, "step": 6375 }, { "epoch": 11.76454293628809, "grad_norm": 0.8744934797286987, "learning_rate": 1.9119e-05, "loss": 0.1026, "step": 6376 }, { "epoch": 11.766389658356417, "grad_norm": 0.41327589750289917, "learning_rate": 1.9122e-05, "loss": 0.0332, "step": 6377 }, { "epoch": 11.768236380424746, "grad_norm": 0.43064767122268677, "learning_rate": 1.9125e-05, "loss": 0.0283, "step": 6378 }, { "epoch": 11.770083102493075, "grad_norm": 0.46121013164520264, "learning_rate": 1.9128e-05, "loss": 0.0629, "step": 6379 }, { "epoch": 11.771929824561404, "grad_norm": 0.3944796621799469, "learning_rate": 1.9131e-05, "loss": 0.0398, "step": 6380 }, { "epoch": 11.773776546629731, "grad_norm": 0.3431522250175476, "learning_rate": 1.9134e-05, "loss": 0.0369, "step": 6381 }, { "epoch": 11.77562326869806, "grad_norm": 0.3767950236797333, "learning_rate": 1.9137e-05, "loss": 0.0288, "step": 6382 }, { "epoch": 11.77746999076639, "grad_norm": 0.3588629364967346, 
"learning_rate": 1.914e-05, "loss": 0.0206, "step": 6383 }, { "epoch": 11.779316712834719, "grad_norm": 0.5769614577293396, "learning_rate": 1.9143e-05, "loss": 0.0299, "step": 6384 }, { "epoch": 11.781163434903046, "grad_norm": 0.49329817295074463, "learning_rate": 1.9146e-05, "loss": 0.033, "step": 6385 }, { "epoch": 11.783010156971375, "grad_norm": 0.38085702061653137, "learning_rate": 1.9149e-05, "loss": 0.0218, "step": 6386 }, { "epoch": 11.784856879039705, "grad_norm": 0.970015287399292, "learning_rate": 1.9152e-05, "loss": 0.0306, "step": 6387 }, { "epoch": 11.786703601108034, "grad_norm": 0.4538666903972626, "learning_rate": 1.9155e-05, "loss": 0.0162, "step": 6388 }, { "epoch": 11.788550323176363, "grad_norm": 0.45507508516311646, "learning_rate": 1.9158e-05, "loss": 0.0243, "step": 6389 }, { "epoch": 11.79039704524469, "grad_norm": 0.4752728343009949, "learning_rate": 1.9161000000000002e-05, "loss": 0.0184, "step": 6390 }, { "epoch": 11.79224376731302, "grad_norm": 0.3834904432296753, "learning_rate": 1.9164e-05, "loss": 0.0195, "step": 6391 }, { "epoch": 11.794090489381349, "grad_norm": 0.42033740878105164, "learning_rate": 1.9167e-05, "loss": 0.0208, "step": 6392 }, { "epoch": 11.795937211449676, "grad_norm": 0.40084317326545715, "learning_rate": 1.917e-05, "loss": 0.0186, "step": 6393 }, { "epoch": 11.797783933518005, "grad_norm": 0.5597448348999023, "learning_rate": 1.9173e-05, "loss": 0.0245, "step": 6394 }, { "epoch": 11.799630655586334, "grad_norm": 0.3703473210334778, "learning_rate": 1.9176e-05, "loss": 0.0216, "step": 6395 }, { "epoch": 11.801477377654663, "grad_norm": 0.39047250151634216, "learning_rate": 1.9179e-05, "loss": 0.0139, "step": 6396 }, { "epoch": 11.803324099722992, "grad_norm": 0.5017368197441101, "learning_rate": 1.9182e-05, "loss": 0.0182, "step": 6397 }, { "epoch": 11.80517082179132, "grad_norm": 0.7702073454856873, "learning_rate": 1.9185e-05, "loss": 0.0157, "step": 6398 }, { "epoch": 11.807017543859649, "grad_norm": 
0.5346407890319824, "learning_rate": 1.9188e-05, "loss": 0.0236, "step": 6399 }, { "epoch": 11.808864265927978, "grad_norm": 0.37275490164756775, "learning_rate": 1.9191000000000003e-05, "loss": 0.0215, "step": 6400 }, { "epoch": 11.810710987996307, "grad_norm": 0.6201837062835693, "learning_rate": 1.9194000000000003e-05, "loss": 0.0226, "step": 6401 }, { "epoch": 11.812557710064635, "grad_norm": 1.1140520572662354, "learning_rate": 1.9197000000000003e-05, "loss": 0.0433, "step": 6402 }, { "epoch": 11.814404432132964, "grad_norm": 0.8839095234870911, "learning_rate": 1.9200000000000003e-05, "loss": 0.0261, "step": 6403 }, { "epoch": 11.816251154201293, "grad_norm": 0.5836665034294128, "learning_rate": 1.9203e-05, "loss": 0.0299, "step": 6404 }, { "epoch": 11.818097876269622, "grad_norm": 0.7881624102592468, "learning_rate": 1.9206e-05, "loss": 0.0281, "step": 6405 }, { "epoch": 11.81994459833795, "grad_norm": 0.7433090806007385, "learning_rate": 1.9209e-05, "loss": 0.0277, "step": 6406 }, { "epoch": 11.821791320406279, "grad_norm": 0.5852258205413818, "learning_rate": 1.9212e-05, "loss": 0.0233, "step": 6407 }, { "epoch": 11.823638042474608, "grad_norm": 0.374634712934494, "learning_rate": 1.9214999999999998e-05, "loss": 0.0174, "step": 6408 }, { "epoch": 11.825484764542937, "grad_norm": 0.4605027735233307, "learning_rate": 1.9217999999999998e-05, "loss": 0.0194, "step": 6409 }, { "epoch": 11.827331486611264, "grad_norm": 0.6278563737869263, "learning_rate": 1.9221e-05, "loss": 0.0233, "step": 6410 }, { "epoch": 11.829178208679593, "grad_norm": 0.7279020547866821, "learning_rate": 1.9224e-05, "loss": 0.0452, "step": 6411 }, { "epoch": 11.831024930747922, "grad_norm": 0.5646888613700867, "learning_rate": 1.9227e-05, "loss": 0.0137, "step": 6412 }, { "epoch": 11.832871652816252, "grad_norm": 0.8535590171813965, "learning_rate": 1.923e-05, "loss": 0.2184, "step": 6413 }, { "epoch": 11.83471837488458, "grad_norm": 0.6590369939804077, "learning_rate": 1.9233e-05, 
"loss": 0.2622, "step": 6414 }, { "epoch": 11.836565096952908, "grad_norm": 0.7224426865577698, "learning_rate": 1.9236e-05, "loss": 0.2415, "step": 6415 }, { "epoch": 11.838411819021237, "grad_norm": 0.6342539191246033, "learning_rate": 1.9239e-05, "loss": 0.1965, "step": 6416 }, { "epoch": 11.840258541089566, "grad_norm": 0.6734256148338318, "learning_rate": 1.9242e-05, "loss": 0.1593, "step": 6417 }, { "epoch": 11.842105263157894, "grad_norm": 0.6946101188659668, "learning_rate": 1.9245e-05, "loss": 0.1726, "step": 6418 }, { "epoch": 11.843951985226223, "grad_norm": 0.6345155835151672, "learning_rate": 1.9248e-05, "loss": 0.1261, "step": 6419 }, { "epoch": 11.845798707294552, "grad_norm": 0.8070105910301208, "learning_rate": 1.9251000000000003e-05, "loss": 0.0942, "step": 6420 }, { "epoch": 11.847645429362881, "grad_norm": 0.6948537230491638, "learning_rate": 1.9254000000000002e-05, "loss": 0.1272, "step": 6421 }, { "epoch": 11.84949215143121, "grad_norm": 1.096514105796814, "learning_rate": 1.9257000000000002e-05, "loss": 0.135, "step": 6422 }, { "epoch": 11.851338873499538, "grad_norm": 0.5865067839622498, "learning_rate": 1.9260000000000002e-05, "loss": 0.1008, "step": 6423 }, { "epoch": 11.853185595567867, "grad_norm": 0.5337828397750854, "learning_rate": 1.9263000000000002e-05, "loss": 0.0691, "step": 6424 }, { "epoch": 11.855032317636196, "grad_norm": 0.5951547026634216, "learning_rate": 1.9266e-05, "loss": 0.0889, "step": 6425 }, { "epoch": 11.856879039704525, "grad_norm": 0.45236048102378845, "learning_rate": 1.9269e-05, "loss": 0.0799, "step": 6426 }, { "epoch": 11.858725761772853, "grad_norm": 0.4453403353691101, "learning_rate": 1.9272e-05, "loss": 0.0491, "step": 6427 }, { "epoch": 11.860572483841182, "grad_norm": 0.6715244650840759, "learning_rate": 1.9275e-05, "loss": 0.0551, "step": 6428 }, { "epoch": 11.86241920590951, "grad_norm": 0.465982049703598, "learning_rate": 1.9277999999999997e-05, "loss": 0.0275, "step": 6429 }, { "epoch": 
11.86426592797784, "grad_norm": 0.4710586667060852, "learning_rate": 1.9281e-05, "loss": 0.0405, "step": 6430 }, { "epoch": 11.866112650046167, "grad_norm": 0.37974944710731506, "learning_rate": 1.9284e-05, "loss": 0.0258, "step": 6431 }, { "epoch": 11.867959372114496, "grad_norm": 0.44384387135505676, "learning_rate": 1.9287e-05, "loss": 0.0256, "step": 6432 }, { "epoch": 11.869806094182826, "grad_norm": 0.536176323890686, "learning_rate": 1.929e-05, "loss": 0.0261, "step": 6433 }, { "epoch": 11.871652816251155, "grad_norm": 0.44399988651275635, "learning_rate": 1.9293e-05, "loss": 0.0306, "step": 6434 }, { "epoch": 11.873499538319482, "grad_norm": 0.47737666964530945, "learning_rate": 1.9296e-05, "loss": 0.0285, "step": 6435 }, { "epoch": 11.875346260387811, "grad_norm": 0.35225576162338257, "learning_rate": 1.9299e-05, "loss": 0.0196, "step": 6436 }, { "epoch": 11.87719298245614, "grad_norm": 0.5185000896453857, "learning_rate": 1.9302e-05, "loss": 0.025, "step": 6437 }, { "epoch": 11.87903970452447, "grad_norm": 0.33857572078704834, "learning_rate": 1.9305e-05, "loss": 0.0132, "step": 6438 }, { "epoch": 11.880886426592799, "grad_norm": 0.40006375312805176, "learning_rate": 1.9308e-05, "loss": 0.0192, "step": 6439 }, { "epoch": 11.882733148661126, "grad_norm": 0.3900461494922638, "learning_rate": 1.9311000000000002e-05, "loss": 0.0238, "step": 6440 }, { "epoch": 11.884579870729455, "grad_norm": 0.22454053163528442, "learning_rate": 1.9314000000000002e-05, "loss": 0.0128, "step": 6441 }, { "epoch": 11.886426592797784, "grad_norm": 0.6170540452003479, "learning_rate": 1.9317e-05, "loss": 0.0245, "step": 6442 }, { "epoch": 11.888273314866112, "grad_norm": 0.4073467254638672, "learning_rate": 1.932e-05, "loss": 0.0151, "step": 6443 }, { "epoch": 11.89012003693444, "grad_norm": 0.4349503517150879, "learning_rate": 1.9323e-05, "loss": 0.0193, "step": 6444 }, { "epoch": 11.89196675900277, "grad_norm": 0.6105451583862305, "learning_rate": 1.9326e-05, "loss": 0.0266, 
"step": 6445 }, { "epoch": 11.8938134810711, "grad_norm": 0.44563114643096924, "learning_rate": 1.9329e-05, "loss": 0.0201, "step": 6446 }, { "epoch": 11.895660203139428, "grad_norm": 0.37493470311164856, "learning_rate": 1.9332e-05, "loss": 0.0161, "step": 6447 }, { "epoch": 11.897506925207756, "grad_norm": 0.5489336252212524, "learning_rate": 1.9335e-05, "loss": 0.026, "step": 6448 }, { "epoch": 11.899353647276085, "grad_norm": 0.4425601661205292, "learning_rate": 1.9338e-05, "loss": 0.0341, "step": 6449 }, { "epoch": 11.901200369344414, "grad_norm": 1.0750174522399902, "learning_rate": 1.9341000000000003e-05, "loss": 0.0274, "step": 6450 }, { "epoch": 11.903047091412743, "grad_norm": 0.5695386528968811, "learning_rate": 1.9344000000000003e-05, "loss": 0.0247, "step": 6451 }, { "epoch": 11.90489381348107, "grad_norm": 0.953221321105957, "learning_rate": 1.9347000000000003e-05, "loss": 0.0357, "step": 6452 }, { "epoch": 11.9067405355494, "grad_norm": 0.5750436782836914, "learning_rate": 1.935e-05, "loss": 0.0235, "step": 6453 }, { "epoch": 11.908587257617729, "grad_norm": 0.4252550005912781, "learning_rate": 1.9353e-05, "loss": 0.0249, "step": 6454 }, { "epoch": 11.910433979686058, "grad_norm": 1.0517467260360718, "learning_rate": 1.9356e-05, "loss": 0.0216, "step": 6455 }, { "epoch": 11.912280701754385, "grad_norm": 0.45456618070602417, "learning_rate": 1.9359e-05, "loss": 0.0185, "step": 6456 }, { "epoch": 11.914127423822714, "grad_norm": 0.9214330315589905, "learning_rate": 1.9362e-05, "loss": 0.0264, "step": 6457 }, { "epoch": 11.915974145891044, "grad_norm": 0.6516225337982178, "learning_rate": 1.9365e-05, "loss": 0.0231, "step": 6458 }, { "epoch": 11.917820867959373, "grad_norm": 0.8239295482635498, "learning_rate": 1.9367999999999998e-05, "loss": 0.0239, "step": 6459 }, { "epoch": 11.9196675900277, "grad_norm": 0.7495092153549194, "learning_rate": 1.9371e-05, "loss": 0.0236, "step": 6460 }, { "epoch": 11.92151431209603, "grad_norm": 1.0203250646591187, 
"learning_rate": 1.9374e-05, "loss": 0.0328, "step": 6461 }, { "epoch": 11.923361034164358, "grad_norm": 1.0962752103805542, "learning_rate": 1.9377e-05, "loss": 0.0556, "step": 6462 }, { "epoch": 11.925207756232687, "grad_norm": 0.7170693278312683, "learning_rate": 1.938e-05, "loss": 0.2026, "step": 6463 }, { "epoch": 11.927054478301017, "grad_norm": 0.6785096526145935, "learning_rate": 1.9383e-05, "loss": 0.163, "step": 6464 }, { "epoch": 11.928901200369344, "grad_norm": 0.4773499369621277, "learning_rate": 1.9386e-05, "loss": 0.1217, "step": 6465 }, { "epoch": 11.930747922437673, "grad_norm": 0.9946964383125305, "learning_rate": 1.9389e-05, "loss": 0.1997, "step": 6466 }, { "epoch": 11.932594644506002, "grad_norm": 0.8184168934822083, "learning_rate": 1.9392e-05, "loss": 0.1777, "step": 6467 }, { "epoch": 11.93444136657433, "grad_norm": 0.7503487467765808, "learning_rate": 1.9395e-05, "loss": 0.1585, "step": 6468 }, { "epoch": 11.936288088642659, "grad_norm": 0.4815266728401184, "learning_rate": 1.9398e-05, "loss": 0.1039, "step": 6469 }, { "epoch": 11.938134810710988, "grad_norm": 0.8742775917053223, "learning_rate": 1.9401000000000003e-05, "loss": 0.1137, "step": 6470 }, { "epoch": 11.939981532779317, "grad_norm": 0.5850298404693604, "learning_rate": 1.9404000000000003e-05, "loss": 0.0984, "step": 6471 }, { "epoch": 11.941828254847646, "grad_norm": 0.6854799389839172, "learning_rate": 1.9407000000000002e-05, "loss": 0.0732, "step": 6472 }, { "epoch": 11.943674976915974, "grad_norm": 0.5733482837677002, "learning_rate": 1.9410000000000002e-05, "loss": 0.0735, "step": 6473 }, { "epoch": 11.945521698984303, "grad_norm": 0.5619057416915894, "learning_rate": 1.9413000000000002e-05, "loss": 0.059, "step": 6474 }, { "epoch": 11.947368421052632, "grad_norm": 0.5128675699234009, "learning_rate": 1.9416000000000002e-05, "loss": 0.0609, "step": 6475 }, { "epoch": 11.949215143120961, "grad_norm": 0.5612236261367798, "learning_rate": 1.9419e-05, "loss": 0.0606, "step": 
6476 }, { "epoch": 11.951061865189288, "grad_norm": 0.5712298154830933, "learning_rate": 1.9422e-05, "loss": 0.037, "step": 6477 }, { "epoch": 11.952908587257618, "grad_norm": 0.8684074282646179, "learning_rate": 1.9424999999999998e-05, "loss": 0.0637, "step": 6478 }, { "epoch": 11.954755309325947, "grad_norm": 0.3099026083946228, "learning_rate": 1.9427999999999998e-05, "loss": 0.0245, "step": 6479 }, { "epoch": 11.956602031394276, "grad_norm": 0.2949822247028351, "learning_rate": 1.9431e-05, "loss": 0.0138, "step": 6480 }, { "epoch": 11.958448753462603, "grad_norm": 0.3665012717247009, "learning_rate": 1.9434e-05, "loss": 0.0161, "step": 6481 }, { "epoch": 11.960295475530932, "grad_norm": 0.3798087239265442, "learning_rate": 1.9437e-05, "loss": 0.0244, "step": 6482 }, { "epoch": 11.962142197599261, "grad_norm": 0.525526225566864, "learning_rate": 1.944e-05, "loss": 0.0402, "step": 6483 }, { "epoch": 11.96398891966759, "grad_norm": 0.45501410961151123, "learning_rate": 1.9443e-05, "loss": 0.0238, "step": 6484 }, { "epoch": 11.965835641735918, "grad_norm": 0.26545315980911255, "learning_rate": 1.9446e-05, "loss": 0.0135, "step": 6485 }, { "epoch": 11.967682363804247, "grad_norm": 0.6194910407066345, "learning_rate": 1.9449e-05, "loss": 0.0347, "step": 6486 }, { "epoch": 11.969529085872576, "grad_norm": 0.5148172378540039, "learning_rate": 1.9452e-05, "loss": 0.0699, "step": 6487 }, { "epoch": 11.971375807940905, "grad_norm": 0.6581459045410156, "learning_rate": 1.9455e-05, "loss": 0.0313, "step": 6488 }, { "epoch": 11.973222530009235, "grad_norm": 0.6907075047492981, "learning_rate": 1.9458e-05, "loss": 0.0248, "step": 6489 }, { "epoch": 11.975069252077562, "grad_norm": 0.4608518183231354, "learning_rate": 1.9461000000000002e-05, "loss": 0.0242, "step": 6490 }, { "epoch": 11.976915974145891, "grad_norm": 0.48929011821746826, "learning_rate": 1.9464000000000002e-05, "loss": 0.0171, "step": 6491 }, { "epoch": 11.97876269621422, "grad_norm": 0.5067629218101501, 
"learning_rate": 1.9467000000000002e-05, "loss": 0.0204, "step": 6492 }, { "epoch": 11.980609418282548, "grad_norm": 0.5316839814186096, "learning_rate": 1.947e-05, "loss": 0.0255, "step": 6493 }, { "epoch": 11.982456140350877, "grad_norm": 0.7034479975700378, "learning_rate": 1.9473e-05, "loss": 0.0407, "step": 6494 }, { "epoch": 11.984302862419206, "grad_norm": 0.469872385263443, "learning_rate": 1.9476e-05, "loss": 0.0268, "step": 6495 }, { "epoch": 11.986149584487535, "grad_norm": 0.49888551235198975, "learning_rate": 1.9479e-05, "loss": 0.0164, "step": 6496 }, { "epoch": 11.987996306555864, "grad_norm": 0.5648143887519836, "learning_rate": 1.9482e-05, "loss": 0.0296, "step": 6497 }, { "epoch": 11.989843028624191, "grad_norm": 0.43154245615005493, "learning_rate": 1.9485e-05, "loss": 0.023, "step": 6498 }, { "epoch": 11.99168975069252, "grad_norm": 0.7657442092895508, "learning_rate": 1.9488e-05, "loss": 0.0136, "step": 6499 }, { "epoch": 11.99353647276085, "grad_norm": 0.6309219002723694, "learning_rate": 1.9491000000000004e-05, "loss": 0.0256, "step": 6500 }, { "epoch": 11.995383194829179, "grad_norm": 0.5489864945411682, "learning_rate": 1.9494000000000003e-05, "loss": 0.0134, "step": 6501 }, { "epoch": 11.997229916897506, "grad_norm": 0.5915862321853638, "learning_rate": 1.9497e-05, "loss": 0.0278, "step": 6502 }, { "epoch": 11.999076638965835, "grad_norm": 1.2053042650222778, "learning_rate": 1.95e-05, "loss": 0.045, "step": 6503 }, { "epoch": 12.0, "grad_norm": 0.3713594377040863, "learning_rate": 1.9503e-05, "loss": 0.0098, "step": 6504 }, { "epoch": 12.00184672206833, "grad_norm": 2.1398563385009766, "learning_rate": 1.9506e-05, "loss": 0.2887, "step": 6505 }, { "epoch": 12.003693444136658, "grad_norm": 0.7182486653327942, "learning_rate": 1.9509e-05, "loss": 0.2153, "step": 6506 }, { "epoch": 12.005540166204986, "grad_norm": 0.6150814294815063, "learning_rate": 1.9512e-05, "loss": 0.1447, "step": 6507 }, { "epoch": 12.007386888273315, "grad_norm": 
0.772539496421814, "learning_rate": 1.9515e-05, "loss": 0.1322, "step": 6508 }, { "epoch": 12.009233610341644, "grad_norm": 1.2308114767074585, "learning_rate": 1.9518e-05, "loss": 0.1985, "step": 6509 }, { "epoch": 12.011080332409973, "grad_norm": 1.0095338821411133, "learning_rate": 1.9520999999999998e-05, "loss": 0.1299, "step": 6510 }, { "epoch": 12.0129270544783, "grad_norm": 0.43991515040397644, "learning_rate": 1.9524e-05, "loss": 0.0782, "step": 6511 }, { "epoch": 12.01477377654663, "grad_norm": 0.7752163410186768, "learning_rate": 1.9527e-05, "loss": 0.128, "step": 6512 }, { "epoch": 12.016620498614959, "grad_norm": 0.5800342559814453, "learning_rate": 1.953e-05, "loss": 0.0866, "step": 6513 }, { "epoch": 12.018467220683288, "grad_norm": 0.5003458261489868, "learning_rate": 1.9533e-05, "loss": 0.0812, "step": 6514 }, { "epoch": 12.020313942751615, "grad_norm": 0.6201184988021851, "learning_rate": 1.9536e-05, "loss": 0.0759, "step": 6515 }, { "epoch": 12.022160664819944, "grad_norm": 0.4879046678543091, "learning_rate": 1.9539e-05, "loss": 0.075, "step": 6516 }, { "epoch": 12.024007386888274, "grad_norm": 0.5425206422805786, "learning_rate": 1.9542e-05, "loss": 0.0834, "step": 6517 }, { "epoch": 12.025854108956603, "grad_norm": 0.3138076961040497, "learning_rate": 1.9545e-05, "loss": 0.0351, "step": 6518 }, { "epoch": 12.02770083102493, "grad_norm": 0.32039162516593933, "learning_rate": 1.9548e-05, "loss": 0.0247, "step": 6519 }, { "epoch": 12.02954755309326, "grad_norm": 0.28697481751441956, "learning_rate": 1.9551e-05, "loss": 0.023, "step": 6520 }, { "epoch": 12.031394275161588, "grad_norm": 0.4406968355178833, "learning_rate": 1.9554000000000003e-05, "loss": 0.0343, "step": 6521 }, { "epoch": 12.033240997229917, "grad_norm": 0.437920480966568, "learning_rate": 1.9557000000000003e-05, "loss": 0.0232, "step": 6522 }, { "epoch": 12.035087719298245, "grad_norm": 0.24330918490886688, "learning_rate": 1.9560000000000002e-05, "loss": 0.0188, "step": 6523 }, { 
"epoch": 12.036934441366574, "grad_norm": 0.4772188663482666, "learning_rate": 1.9563000000000002e-05, "loss": 0.0483, "step": 6524 }, { "epoch": 12.038781163434903, "grad_norm": 0.4449160397052765, "learning_rate": 1.9566000000000002e-05, "loss": 0.0279, "step": 6525 }, { "epoch": 12.040627885503232, "grad_norm": 0.481763631105423, "learning_rate": 1.9569000000000002e-05, "loss": 0.0334, "step": 6526 }, { "epoch": 12.04247460757156, "grad_norm": 0.786405086517334, "learning_rate": 1.9571999999999998e-05, "loss": 0.0255, "step": 6527 }, { "epoch": 12.044321329639889, "grad_norm": 0.2664114832878113, "learning_rate": 1.9574999999999998e-05, "loss": 0.0123, "step": 6528 }, { "epoch": 12.046168051708218, "grad_norm": 0.2534761428833008, "learning_rate": 1.9577999999999998e-05, "loss": 0.0123, "step": 6529 }, { "epoch": 12.048014773776547, "grad_norm": 0.4776705205440521, "learning_rate": 1.9580999999999998e-05, "loss": 0.0211, "step": 6530 }, { "epoch": 12.049861495844876, "grad_norm": 0.8299447298049927, "learning_rate": 1.9584e-05, "loss": 0.0158, "step": 6531 }, { "epoch": 12.051708217913204, "grad_norm": 0.2538312077522278, "learning_rate": 1.9587e-05, "loss": 0.011, "step": 6532 }, { "epoch": 12.053554939981533, "grad_norm": 1.139421820640564, "learning_rate": 1.959e-05, "loss": 0.021, "step": 6533 }, { "epoch": 12.055401662049862, "grad_norm": 0.7884008288383484, "learning_rate": 1.9593e-05, "loss": 0.1168, "step": 6534 }, { "epoch": 12.057248384118191, "grad_norm": 0.7113057374954224, "learning_rate": 1.9596e-05, "loss": 0.0277, "step": 6535 }, { "epoch": 12.059095106186518, "grad_norm": 0.9404953718185425, "learning_rate": 1.9599e-05, "loss": 0.0284, "step": 6536 }, { "epoch": 12.060941828254848, "grad_norm": 0.6621551513671875, "learning_rate": 1.9602e-05, "loss": 0.0208, "step": 6537 }, { "epoch": 12.062788550323177, "grad_norm": 0.6471493244171143, "learning_rate": 1.9605e-05, "loss": 0.0229, "step": 6538 }, { "epoch": 12.064635272391506, "grad_norm": 
0.6462122797966003, "learning_rate": 1.9608e-05, "loss": 0.0257, "step": 6539 }, { "epoch": 12.066481994459833, "grad_norm": 0.6691087484359741, "learning_rate": 1.9611e-05, "loss": 0.0228, "step": 6540 }, { "epoch": 12.068328716528162, "grad_norm": 0.5461102724075317, "learning_rate": 1.9614000000000002e-05, "loss": 0.046, "step": 6541 }, { "epoch": 12.070175438596491, "grad_norm": 0.46700987219810486, "learning_rate": 1.9617000000000002e-05, "loss": 0.0109, "step": 6542 }, { "epoch": 12.07202216066482, "grad_norm": 0.4373733401298523, "learning_rate": 1.9620000000000002e-05, "loss": 0.0279, "step": 6543 }, { "epoch": 12.073868882733148, "grad_norm": 0.3557780981063843, "learning_rate": 1.9623e-05, "loss": 0.0134, "step": 6544 }, { "epoch": 12.075715604801477, "grad_norm": 0.5521689653396606, "learning_rate": 1.9626e-05, "loss": 0.0166, "step": 6545 }, { "epoch": 12.077562326869806, "grad_norm": 0.43459439277648926, "learning_rate": 1.9629e-05, "loss": 0.016, "step": 6546 }, { "epoch": 12.079409048938135, "grad_norm": 1.5827715396881104, "learning_rate": 1.9632e-05, "loss": 0.0308, "step": 6547 }, { "epoch": 12.081255771006463, "grad_norm": 0.5385817885398865, "learning_rate": 1.9635e-05, "loss": 0.0255, "step": 6548 }, { "epoch": 12.083102493074792, "grad_norm": 0.44712623953819275, "learning_rate": 1.9638e-05, "loss": 0.0099, "step": 6549 }, { "epoch": 12.084949215143121, "grad_norm": 0.621837317943573, "learning_rate": 1.9641e-05, "loss": 0.0273, "step": 6550 }, { "epoch": 12.08679593721145, "grad_norm": 0.321278840303421, "learning_rate": 1.9644e-05, "loss": 0.0159, "step": 6551 }, { "epoch": 12.088642659279778, "grad_norm": 0.6715978980064392, "learning_rate": 1.9647e-05, "loss": 0.0202, "step": 6552 }, { "epoch": 12.090489381348107, "grad_norm": 0.5194762349128723, "learning_rate": 1.965e-05, "loss": 0.0208, "step": 6553 }, { "epoch": 12.092336103416436, "grad_norm": 0.6484201550483704, "learning_rate": 1.9653e-05, "loss": 0.0149, "step": 6554 }, { "epoch": 
12.094182825484765, "grad_norm": 1.2438277006149292, "learning_rate": 1.9656e-05, "loss": 0.2754, "step": 6555 }, { "epoch": 12.096029547553094, "grad_norm": 0.9930234551429749, "learning_rate": 1.9659e-05, "loss": 0.239, "step": 6556 }, { "epoch": 12.097876269621421, "grad_norm": 0.6995410323143005, "learning_rate": 1.9662e-05, "loss": 0.2009, "step": 6557 }, { "epoch": 12.09972299168975, "grad_norm": 0.8049606084823608, "learning_rate": 1.9665e-05, "loss": 0.1279, "step": 6558 }, { "epoch": 12.10156971375808, "grad_norm": 0.649700939655304, "learning_rate": 1.9668e-05, "loss": 0.1453, "step": 6559 }, { "epoch": 12.103416435826409, "grad_norm": 0.6611593961715698, "learning_rate": 1.9671e-05, "loss": 0.1059, "step": 6560 }, { "epoch": 12.105263157894736, "grad_norm": 0.6356304883956909, "learning_rate": 1.9674000000000002e-05, "loss": 0.1879, "step": 6561 }, { "epoch": 12.107109879963065, "grad_norm": 2.028852939605713, "learning_rate": 1.9677e-05, "loss": 0.1257, "step": 6562 }, { "epoch": 12.108956602031395, "grad_norm": 0.5349612236022949, "learning_rate": 1.968e-05, "loss": 0.1681, "step": 6563 }, { "epoch": 12.110803324099724, "grad_norm": 1.0658071041107178, "learning_rate": 1.9683e-05, "loss": 0.086, "step": 6564 }, { "epoch": 12.112650046168051, "grad_norm": 0.8170069456100464, "learning_rate": 1.9686e-05, "loss": 0.064, "step": 6565 }, { "epoch": 12.11449676823638, "grad_norm": 0.5723968744277954, "learning_rate": 1.9689e-05, "loss": 0.0567, "step": 6566 }, { "epoch": 12.11634349030471, "grad_norm": 0.8841872811317444, "learning_rate": 1.9692e-05, "loss": 0.1255, "step": 6567 }, { "epoch": 12.118190212373039, "grad_norm": 0.3957323431968689, "learning_rate": 1.9695e-05, "loss": 0.0482, "step": 6568 }, { "epoch": 12.120036934441366, "grad_norm": 0.4203268885612488, "learning_rate": 1.9698e-05, "loss": 0.0498, "step": 6569 }, { "epoch": 12.121883656509695, "grad_norm": 0.48907235264778137, "learning_rate": 1.9701e-05, "loss": 0.0271, "step": 6570 }, { 
"epoch": 12.123730378578024, "grad_norm": 0.6788593530654907, "learning_rate": 1.9704000000000003e-05, "loss": 0.0692, "step": 6571 }, { "epoch": 12.125577100646353, "grad_norm": 0.3560887575149536, "learning_rate": 1.9707000000000003e-05, "loss": 0.0281, "step": 6572 }, { "epoch": 12.12742382271468, "grad_norm": 0.5257298946380615, "learning_rate": 1.9710000000000003e-05, "loss": 0.0747, "step": 6573 }, { "epoch": 12.12927054478301, "grad_norm": 0.41419556736946106, "learning_rate": 1.9713000000000003e-05, "loss": 0.0192, "step": 6574 }, { "epoch": 12.131117266851339, "grad_norm": 0.40501436591148376, "learning_rate": 1.9716000000000002e-05, "loss": 0.0148, "step": 6575 }, { "epoch": 12.132963988919668, "grad_norm": 0.5695074796676636, "learning_rate": 1.9719e-05, "loss": 0.0535, "step": 6576 }, { "epoch": 12.134810710987995, "grad_norm": 0.6081413626670837, "learning_rate": 1.9722e-05, "loss": 0.0284, "step": 6577 }, { "epoch": 12.136657433056325, "grad_norm": 0.30088096857070923, "learning_rate": 1.9725e-05, "loss": 0.0154, "step": 6578 }, { "epoch": 12.138504155124654, "grad_norm": 0.37103888392448425, "learning_rate": 1.9727999999999998e-05, "loss": 0.0171, "step": 6579 }, { "epoch": 12.140350877192983, "grad_norm": 0.4621252417564392, "learning_rate": 1.9730999999999998e-05, "loss": 0.0283, "step": 6580 }, { "epoch": 12.142197599261312, "grad_norm": 0.26825007796287537, "learning_rate": 1.9734e-05, "loss": 0.0201, "step": 6581 }, { "epoch": 12.14404432132964, "grad_norm": 0.4614250659942627, "learning_rate": 1.9737e-05, "loss": 0.0158, "step": 6582 }, { "epoch": 12.145891043397969, "grad_norm": 0.3444186747074127, "learning_rate": 1.974e-05, "loss": 0.0149, "step": 6583 }, { "epoch": 12.147737765466298, "grad_norm": 0.34118175506591797, "learning_rate": 1.9743e-05, "loss": 0.0165, "step": 6584 }, { "epoch": 12.149584487534627, "grad_norm": 0.669164776802063, "learning_rate": 1.9746e-05, "loss": 0.0141, "step": 6585 }, { "epoch": 12.151431209602954, 
"grad_norm": 0.532076358795166, "learning_rate": 1.9749e-05, "loss": 0.0254, "step": 6586 }, { "epoch": 12.153277931671283, "grad_norm": 0.40641987323760986, "learning_rate": 1.9752e-05, "loss": 0.0146, "step": 6587 }, { "epoch": 12.155124653739612, "grad_norm": 0.3384653627872467, "learning_rate": 1.9755e-05, "loss": 0.014, "step": 6588 }, { "epoch": 12.156971375807942, "grad_norm": 0.7094037532806396, "learning_rate": 1.9758e-05, "loss": 0.0129, "step": 6589 }, { "epoch": 12.158818097876269, "grad_norm": 0.683089017868042, "learning_rate": 1.9761e-05, "loss": 0.0229, "step": 6590 }, { "epoch": 12.160664819944598, "grad_norm": 0.5520561337471008, "learning_rate": 1.9764000000000003e-05, "loss": 0.0253, "step": 6591 }, { "epoch": 12.162511542012927, "grad_norm": 0.6962130665779114, "learning_rate": 1.9767000000000002e-05, "loss": 0.0172, "step": 6592 }, { "epoch": 12.164358264081256, "grad_norm": 0.5265188813209534, "learning_rate": 1.9770000000000002e-05, "loss": 0.0173, "step": 6593 }, { "epoch": 12.166204986149584, "grad_norm": 0.9148104190826416, "learning_rate": 1.9773000000000002e-05, "loss": 0.0247, "step": 6594 }, { "epoch": 12.168051708217913, "grad_norm": 0.5359881520271301, "learning_rate": 1.9776000000000002e-05, "loss": 0.0196, "step": 6595 }, { "epoch": 12.169898430286242, "grad_norm": 0.41873955726623535, "learning_rate": 1.9779e-05, "loss": 0.0125, "step": 6596 }, { "epoch": 12.171745152354571, "grad_norm": 0.39210107922554016, "learning_rate": 1.9782e-05, "loss": 0.0191, "step": 6597 }, { "epoch": 12.173591874422899, "grad_norm": 0.6035261750221252, "learning_rate": 1.9785e-05, "loss": 0.0198, "step": 6598 }, { "epoch": 12.175438596491228, "grad_norm": 0.3195289373397827, "learning_rate": 1.9788e-05, "loss": 0.0159, "step": 6599 }, { "epoch": 12.177285318559557, "grad_norm": 0.5464214086532593, "learning_rate": 1.9791e-05, "loss": 0.0224, "step": 6600 }, { "epoch": 12.179132040627886, "grad_norm": 0.7400136590003967, "learning_rate": 1.9794e-05, 
"loss": 0.026, "step": 6601 }, { "epoch": 12.180978762696213, "grad_norm": 0.496610552072525, "learning_rate": 1.9797e-05, "loss": 0.0165, "step": 6602 }, { "epoch": 12.182825484764543, "grad_norm": 0.47159847617149353, "learning_rate": 1.98e-05, "loss": 0.0208, "step": 6603 }, { "epoch": 12.184672206832872, "grad_norm": 0.48338207602500916, "learning_rate": 1.9803e-05, "loss": 0.0202, "step": 6604 }, { "epoch": 12.1865189289012, "grad_norm": 1.570766568183899, "learning_rate": 1.9806e-05, "loss": 0.2364, "step": 6605 }, { "epoch": 12.18836565096953, "grad_norm": 0.74299556016922, "learning_rate": 1.9809e-05, "loss": 0.1835, "step": 6606 }, { "epoch": 12.190212373037857, "grad_norm": 0.6581249833106995, "learning_rate": 1.9812e-05, "loss": 0.1466, "step": 6607 }, { "epoch": 12.192059095106186, "grad_norm": 0.6629277467727661, "learning_rate": 1.9815e-05, "loss": 0.1645, "step": 6608 }, { "epoch": 12.193905817174516, "grad_norm": 0.5991044640541077, "learning_rate": 1.9818e-05, "loss": 0.1617, "step": 6609 }, { "epoch": 12.195752539242845, "grad_norm": 1.113797664642334, "learning_rate": 1.9821e-05, "loss": 0.1707, "step": 6610 }, { "epoch": 12.197599261311172, "grad_norm": 0.6141255497932434, "learning_rate": 1.9824000000000002e-05, "loss": 0.0937, "step": 6611 }, { "epoch": 12.199445983379501, "grad_norm": 0.6133359670639038, "learning_rate": 1.9827000000000002e-05, "loss": 0.0931, "step": 6612 }, { "epoch": 12.20129270544783, "grad_norm": 0.44140446186065674, "learning_rate": 1.983e-05, "loss": 0.0858, "step": 6613 }, { "epoch": 12.20313942751616, "grad_norm": 0.5552379488945007, "learning_rate": 1.9833e-05, "loss": 0.0709, "step": 6614 }, { "epoch": 12.204986149584487, "grad_norm": 0.42327743768692017, "learning_rate": 1.9836e-05, "loss": 0.0575, "step": 6615 }, { "epoch": 12.206832871652816, "grad_norm": 0.8752537369728088, "learning_rate": 1.9839e-05, "loss": 0.0878, "step": 6616 }, { "epoch": 12.208679593721145, "grad_norm": 0.7336311936378479, 
"learning_rate": 1.9842e-05, "loss": 0.1506, "step": 6617 }, { "epoch": 12.210526315789474, "grad_norm": 0.6331095695495605, "learning_rate": 1.9845e-05, "loss": 0.0754, "step": 6618 }, { "epoch": 12.212373037857802, "grad_norm": 0.6478675007820129, "learning_rate": 1.9848e-05, "loss": 0.046, "step": 6619 }, { "epoch": 12.21421975992613, "grad_norm": 0.46887344121932983, "learning_rate": 1.9851e-05, "loss": 0.0586, "step": 6620 }, { "epoch": 12.21606648199446, "grad_norm": 0.4673554599285126, "learning_rate": 1.9854000000000003e-05, "loss": 0.0213, "step": 6621 }, { "epoch": 12.21791320406279, "grad_norm": 0.3019261658191681, "learning_rate": 1.9857000000000003e-05, "loss": 0.0208, "step": 6622 }, { "epoch": 12.219759926131117, "grad_norm": 0.55424565076828, "learning_rate": 1.9860000000000003e-05, "loss": 0.0172, "step": 6623 }, { "epoch": 12.221606648199446, "grad_norm": 0.5971589088439941, "learning_rate": 1.9863000000000003e-05, "loss": 0.0189, "step": 6624 }, { "epoch": 12.223453370267775, "grad_norm": 2.0746729373931885, "learning_rate": 1.9866e-05, "loss": 0.0486, "step": 6625 }, { "epoch": 12.225300092336104, "grad_norm": 0.390958309173584, "learning_rate": 1.9869e-05, "loss": 0.0159, "step": 6626 }, { "epoch": 12.227146814404431, "grad_norm": 0.7522118091583252, "learning_rate": 1.9872e-05, "loss": 0.0211, "step": 6627 }, { "epoch": 12.22899353647276, "grad_norm": 0.38090553879737854, "learning_rate": 1.9875e-05, "loss": 0.0176, "step": 6628 }, { "epoch": 12.23084025854109, "grad_norm": 0.8301336169242859, "learning_rate": 1.9878e-05, "loss": 0.0378, "step": 6629 }, { "epoch": 12.232686980609419, "grad_norm": 0.6119270324707031, "learning_rate": 1.9880999999999998e-05, "loss": 0.0143, "step": 6630 }, { "epoch": 12.234533702677748, "grad_norm": 0.5565687417984009, "learning_rate": 1.9883999999999998e-05, "loss": 0.0337, "step": 6631 }, { "epoch": 12.236380424746075, "grad_norm": 0.3857928216457367, "learning_rate": 1.9887e-05, "loss": 0.0198, "step": 6632 
}, { "epoch": 12.238227146814404, "grad_norm": 0.39611518383026123, "learning_rate": 1.989e-05, "loss": 0.0233, "step": 6633 }, { "epoch": 12.240073868882734, "grad_norm": 0.4528099596500397, "learning_rate": 1.9893e-05, "loss": 0.0185, "step": 6634 }, { "epoch": 12.241920590951063, "grad_norm": 0.666111946105957, "learning_rate": 1.9896e-05, "loss": 0.0155, "step": 6635 }, { "epoch": 12.24376731301939, "grad_norm": 1.013671636581421, "learning_rate": 1.9899e-05, "loss": 0.0189, "step": 6636 }, { "epoch": 12.24561403508772, "grad_norm": 0.293468177318573, "learning_rate": 1.9902e-05, "loss": 0.0166, "step": 6637 }, { "epoch": 12.247460757156048, "grad_norm": 0.6467782855033875, "learning_rate": 1.9905e-05, "loss": 0.0114, "step": 6638 }, { "epoch": 12.249307479224377, "grad_norm": 0.5225874185562134, "learning_rate": 1.9908e-05, "loss": 0.0245, "step": 6639 }, { "epoch": 12.251154201292705, "grad_norm": 0.6721553206443787, "learning_rate": 1.9911e-05, "loss": 0.0244, "step": 6640 }, { "epoch": 12.253000923361034, "grad_norm": 0.7446433305740356, "learning_rate": 1.9914e-05, "loss": 0.0272, "step": 6641 }, { "epoch": 12.254847645429363, "grad_norm": 0.3481638431549072, "learning_rate": 1.9917000000000003e-05, "loss": 0.021, "step": 6642 }, { "epoch": 12.256694367497692, "grad_norm": 0.5886558890342712, "learning_rate": 1.9920000000000002e-05, "loss": 0.017, "step": 6643 }, { "epoch": 12.25854108956602, "grad_norm": 0.3663330078125, "learning_rate": 1.9923000000000002e-05, "loss": 0.0147, "step": 6644 }, { "epoch": 12.260387811634349, "grad_norm": 0.4481843113899231, "learning_rate": 1.9926000000000002e-05, "loss": 0.0211, "step": 6645 }, { "epoch": 12.262234533702678, "grad_norm": 0.6207283735275269, "learning_rate": 1.9929000000000002e-05, "loss": 0.0242, "step": 6646 }, { "epoch": 12.264081255771007, "grad_norm": 0.5860519409179688, "learning_rate": 1.9932e-05, "loss": 0.0208, "step": 6647 }, { "epoch": 12.265927977839334, "grad_norm": 0.34904736280441284, 
"learning_rate": 1.9935e-05, "loss": 0.0233, "step": 6648 }, { "epoch": 12.267774699907664, "grad_norm": 0.446857213973999, "learning_rate": 1.9938e-05, "loss": 0.0171, "step": 6649 }, { "epoch": 12.269621421975993, "grad_norm": 0.3671776354312897, "learning_rate": 1.9940999999999998e-05, "loss": 0.0146, "step": 6650 }, { "epoch": 12.271468144044322, "grad_norm": 0.499612033367157, "learning_rate": 1.9943999999999997e-05, "loss": 0.0178, "step": 6651 }, { "epoch": 12.27331486611265, "grad_norm": 0.40178707242012024, "learning_rate": 1.9947e-05, "loss": 0.0225, "step": 6652 }, { "epoch": 12.275161588180978, "grad_norm": 1.6798856258392334, "learning_rate": 1.995e-05, "loss": 0.0262, "step": 6653 }, { "epoch": 12.277008310249307, "grad_norm": 0.5195391774177551, "learning_rate": 1.9953e-05, "loss": 0.02, "step": 6654 }, { "epoch": 12.278855032317637, "grad_norm": 1.4945707321166992, "learning_rate": 1.9956e-05, "loss": 0.2249, "step": 6655 }, { "epoch": 12.280701754385966, "grad_norm": 1.618351936340332, "learning_rate": 1.9959e-05, "loss": 0.2945, "step": 6656 }, { "epoch": 12.282548476454293, "grad_norm": 1.1166245937347412, "learning_rate": 1.9962e-05, "loss": 0.1938, "step": 6657 }, { "epoch": 12.284395198522622, "grad_norm": 1.0007214546203613, "learning_rate": 1.9965e-05, "loss": 0.1808, "step": 6658 }, { "epoch": 12.286241920590951, "grad_norm": 0.8794845938682556, "learning_rate": 1.9968e-05, "loss": 0.1573, "step": 6659 }, { "epoch": 12.28808864265928, "grad_norm": 0.7759224772453308, "learning_rate": 1.9971e-05, "loss": 0.1279, "step": 6660 }, { "epoch": 12.289935364727608, "grad_norm": 0.6087407469749451, "learning_rate": 1.9974e-05, "loss": 0.1017, "step": 6661 }, { "epoch": 12.291782086795937, "grad_norm": 0.4634784162044525, "learning_rate": 1.9977000000000002e-05, "loss": 0.0812, "step": 6662 }, { "epoch": 12.293628808864266, "grad_norm": 0.5533508062362671, "learning_rate": 1.9980000000000002e-05, "loss": 0.1524, "step": 6663 }, { "epoch": 
12.295475530932595, "grad_norm": 0.7511826753616333, "learning_rate": 1.9983e-05, "loss": 0.09, "step": 6664 }, { "epoch": 12.297322253000923, "grad_norm": 0.5132535696029663, "learning_rate": 1.9986e-05, "loss": 0.0795, "step": 6665 }, { "epoch": 12.299168975069252, "grad_norm": 0.4026097357273102, "learning_rate": 1.9989e-05, "loss": 0.0406, "step": 6666 }, { "epoch": 12.301015697137581, "grad_norm": 0.5257237553596497, "learning_rate": 1.9992e-05, "loss": 0.0618, "step": 6667 }, { "epoch": 12.30286241920591, "grad_norm": 0.329614520072937, "learning_rate": 1.9995e-05, "loss": 0.0445, "step": 6668 }, { "epoch": 12.304709141274238, "grad_norm": 0.41097500920295715, "learning_rate": 1.9998e-05, "loss": 0.0325, "step": 6669 }, { "epoch": 12.306555863342567, "grad_norm": 1.6208419799804688, "learning_rate": 2.0001e-05, "loss": 0.0479, "step": 6670 }, { "epoch": 12.308402585410896, "grad_norm": 0.6512314677238464, "learning_rate": 2.0004e-05, "loss": 0.029, "step": 6671 }, { "epoch": 12.310249307479225, "grad_norm": 0.41237813234329224, "learning_rate": 2.0007000000000003e-05, "loss": 0.042, "step": 6672 }, { "epoch": 12.312096029547552, "grad_norm": 0.4640461206436157, "learning_rate": 2.0010000000000003e-05, "loss": 0.024, "step": 6673 }, { "epoch": 12.313942751615881, "grad_norm": 0.32338428497314453, "learning_rate": 2.0013e-05, "loss": 0.0277, "step": 6674 }, { "epoch": 12.31578947368421, "grad_norm": 0.561813235282898, "learning_rate": 2.0016e-05, "loss": 0.0236, "step": 6675 }, { "epoch": 12.31763619575254, "grad_norm": 0.48433899879455566, "learning_rate": 2.0019e-05, "loss": 0.043, "step": 6676 }, { "epoch": 12.319482917820867, "grad_norm": 0.35797983407974243, "learning_rate": 2.0022e-05, "loss": 0.0152, "step": 6677 }, { "epoch": 12.321329639889196, "grad_norm": 0.24485398828983307, "learning_rate": 2.0025e-05, "loss": 0.019, "step": 6678 }, { "epoch": 12.323176361957525, "grad_norm": 0.5601616501808167, "learning_rate": 2.0028e-05, "loss": 0.0288, "step": 
6679 }, { "epoch": 12.325023084025855, "grad_norm": 0.3700091242790222, "learning_rate": 2.0031e-05, "loss": 0.0105, "step": 6680 }, { "epoch": 12.326869806094184, "grad_norm": 0.9464983344078064, "learning_rate": 2.0033999999999998e-05, "loss": 0.0188, "step": 6681 }, { "epoch": 12.328716528162511, "grad_norm": 1.1551624536514282, "learning_rate": 2.0037e-05, "loss": 0.0764, "step": 6682 }, { "epoch": 12.33056325023084, "grad_norm": 0.6184530258178711, "learning_rate": 2.004e-05, "loss": 0.0151, "step": 6683 }, { "epoch": 12.33240997229917, "grad_norm": 0.33395180106163025, "learning_rate": 2.0043e-05, "loss": 0.0082, "step": 6684 }, { "epoch": 12.334256694367498, "grad_norm": 0.39083316922187805, "learning_rate": 2.0046e-05, "loss": 0.0223, "step": 6685 }, { "epoch": 12.336103416435826, "grad_norm": 0.45690304040908813, "learning_rate": 2.0049e-05, "loss": 0.026, "step": 6686 }, { "epoch": 12.337950138504155, "grad_norm": 0.3044404685497284, "learning_rate": 2.0052e-05, "loss": 0.0211, "step": 6687 }, { "epoch": 12.339796860572484, "grad_norm": 0.7949528694152832, "learning_rate": 2.0055e-05, "loss": 0.0256, "step": 6688 }, { "epoch": 12.341643582640813, "grad_norm": 0.6159335374832153, "learning_rate": 2.0058e-05, "loss": 0.0214, "step": 6689 }, { "epoch": 12.34349030470914, "grad_norm": 0.48883113265037537, "learning_rate": 2.0061e-05, "loss": 0.0376, "step": 6690 }, { "epoch": 12.34533702677747, "grad_norm": 0.36205312609672546, "learning_rate": 2.0064e-05, "loss": 0.0169, "step": 6691 }, { "epoch": 12.347183748845799, "grad_norm": 0.4154253900051117, "learning_rate": 2.0067000000000003e-05, "loss": 0.0132, "step": 6692 }, { "epoch": 12.349030470914128, "grad_norm": 0.32109948992729187, "learning_rate": 2.0070000000000003e-05, "loss": 0.0204, "step": 6693 }, { "epoch": 12.350877192982455, "grad_norm": 0.6490615010261536, "learning_rate": 2.0073000000000002e-05, "loss": 0.028, "step": 6694 }, { "epoch": 12.352723915050785, "grad_norm": 0.5937842130661011, 
"learning_rate": 2.0076000000000002e-05, "loss": 0.0215, "step": 6695 }, { "epoch": 12.354570637119114, "grad_norm": 0.41471704840660095, "learning_rate": 2.0079000000000002e-05, "loss": 0.016, "step": 6696 }, { "epoch": 12.356417359187443, "grad_norm": 0.4338700473308563, "learning_rate": 2.0082000000000002e-05, "loss": 0.0115, "step": 6697 }, { "epoch": 12.35826408125577, "grad_norm": 0.6470513939857483, "learning_rate": 2.0085e-05, "loss": 0.0306, "step": 6698 }, { "epoch": 12.3601108033241, "grad_norm": 0.6698824763298035, "learning_rate": 2.0087999999999998e-05, "loss": 0.0135, "step": 6699 }, { "epoch": 12.361957525392429, "grad_norm": 0.37007206678390503, "learning_rate": 2.0090999999999998e-05, "loss": 0.0146, "step": 6700 }, { "epoch": 12.363804247460758, "grad_norm": 0.4813641905784607, "learning_rate": 2.0093999999999998e-05, "loss": 0.0202, "step": 6701 }, { "epoch": 12.365650969529085, "grad_norm": 1.0598350763320923, "learning_rate": 2.0097e-05, "loss": 0.0451, "step": 6702 }, { "epoch": 12.367497691597414, "grad_norm": 0.682828426361084, "learning_rate": 2.01e-05, "loss": 0.0215, "step": 6703 }, { "epoch": 12.369344413665743, "grad_norm": 0.82156902551651, "learning_rate": 2.0103e-05, "loss": 0.0499, "step": 6704 }, { "epoch": 12.371191135734072, "grad_norm": 0.9906166195869446, "learning_rate": 2.0106e-05, "loss": 0.213, "step": 6705 }, { "epoch": 12.373037857802402, "grad_norm": 0.7346987128257751, "learning_rate": 2.0109e-05, "loss": 0.2196, "step": 6706 }, { "epoch": 12.374884579870729, "grad_norm": 0.6778160929679871, "learning_rate": 2.0112e-05, "loss": 0.1236, "step": 6707 }, { "epoch": 12.376731301939058, "grad_norm": 0.6001963019371033, "learning_rate": 2.0115e-05, "loss": 0.1193, "step": 6708 }, { "epoch": 12.378578024007387, "grad_norm": 0.7126210331916809, "learning_rate": 2.0118e-05, "loss": 0.1331, "step": 6709 }, { "epoch": 12.380424746075716, "grad_norm": 0.580418586730957, "learning_rate": 2.0121e-05, "loss": 0.1309, "step": 6710 }, 
{ "epoch": 12.382271468144044, "grad_norm": 0.837908923625946, "learning_rate": 2.0124e-05, "loss": 0.1743, "step": 6711 }, { "epoch": 12.384118190212373, "grad_norm": 0.47805464267730713, "learning_rate": 2.0127000000000002e-05, "loss": 0.0906, "step": 6712 }, { "epoch": 12.385964912280702, "grad_norm": 0.5674648880958557, "learning_rate": 2.0130000000000002e-05, "loss": 0.0921, "step": 6713 }, { "epoch": 12.387811634349031, "grad_norm": 0.4743923246860504, "learning_rate": 2.0133000000000002e-05, "loss": 0.0776, "step": 6714 }, { "epoch": 12.389658356417359, "grad_norm": 0.807295024394989, "learning_rate": 2.0136e-05, "loss": 0.0961, "step": 6715 }, { "epoch": 12.391505078485688, "grad_norm": 0.6291029453277588, "learning_rate": 2.0139e-05, "loss": 0.1024, "step": 6716 }, { "epoch": 12.393351800554017, "grad_norm": 0.6043853759765625, "learning_rate": 2.0142e-05, "loss": 0.0667, "step": 6717 }, { "epoch": 12.395198522622346, "grad_norm": 0.8836755752563477, "learning_rate": 2.0145e-05, "loss": 0.0419, "step": 6718 }, { "epoch": 12.397045244690673, "grad_norm": 0.7132802605628967, "learning_rate": 2.0148e-05, "loss": 0.0285, "step": 6719 }, { "epoch": 12.398891966759003, "grad_norm": 0.3178471028804779, "learning_rate": 2.0151e-05, "loss": 0.0358, "step": 6720 }, { "epoch": 12.400738688827332, "grad_norm": 0.4992874264717102, "learning_rate": 2.0154e-05, "loss": 0.036, "step": 6721 }, { "epoch": 12.40258541089566, "grad_norm": 0.38336145877838135, "learning_rate": 2.0157000000000004e-05, "loss": 0.0297, "step": 6722 }, { "epoch": 12.404432132963988, "grad_norm": 0.37750759720802307, "learning_rate": 2.016e-05, "loss": 0.0243, "step": 6723 }, { "epoch": 12.406278855032317, "grad_norm": 0.3396998941898346, "learning_rate": 2.0163e-05, "loss": 0.02, "step": 6724 }, { "epoch": 12.408125577100646, "grad_norm": 0.5006681680679321, "learning_rate": 2.0166e-05, "loss": 0.0113, "step": 6725 }, { "epoch": 12.409972299168976, "grad_norm": 0.29943355917930603, 
"learning_rate": 2.0169e-05, "loss": 0.013, "step": 6726 }, { "epoch": 12.411819021237303, "grad_norm": 0.2731862962245941, "learning_rate": 2.0172e-05, "loss": 0.0179, "step": 6727 }, { "epoch": 12.413665743305632, "grad_norm": 0.4055343270301819, "learning_rate": 2.0175e-05, "loss": 0.0269, "step": 6728 }, { "epoch": 12.415512465373961, "grad_norm": 0.4607265889644623, "learning_rate": 2.0178e-05, "loss": 0.0172, "step": 6729 }, { "epoch": 12.41735918744229, "grad_norm": 0.17374105751514435, "learning_rate": 2.0181e-05, "loss": 0.0122, "step": 6730 }, { "epoch": 12.41920590951062, "grad_norm": 0.30624425411224365, "learning_rate": 2.0184e-05, "loss": 0.0162, "step": 6731 }, { "epoch": 12.421052631578947, "grad_norm": 0.30509719252586365, "learning_rate": 2.0187000000000002e-05, "loss": 0.0176, "step": 6732 }, { "epoch": 12.422899353647276, "grad_norm": 0.6953483819961548, "learning_rate": 2.019e-05, "loss": 0.0228, "step": 6733 }, { "epoch": 12.424746075715605, "grad_norm": 0.5839991569519043, "learning_rate": 2.0193e-05, "loss": 0.0239, "step": 6734 }, { "epoch": 12.426592797783934, "grad_norm": 0.2980464994907379, "learning_rate": 2.0196e-05, "loss": 0.0231, "step": 6735 }, { "epoch": 12.428439519852262, "grad_norm": 0.4845972955226898, "learning_rate": 2.0199e-05, "loss": 0.0149, "step": 6736 }, { "epoch": 12.43028624192059, "grad_norm": 0.5114170908927917, "learning_rate": 2.0202e-05, "loss": 0.0225, "step": 6737 }, { "epoch": 12.43213296398892, "grad_norm": 0.4214576780796051, "learning_rate": 2.0205e-05, "loss": 0.0167, "step": 6738 }, { "epoch": 12.43397968605725, "grad_norm": 0.5541107058525085, "learning_rate": 2.0208e-05, "loss": 0.0245, "step": 6739 }, { "epoch": 12.435826408125576, "grad_norm": 0.40229126811027527, "learning_rate": 2.0211e-05, "loss": 0.0147, "step": 6740 }, { "epoch": 12.437673130193906, "grad_norm": 0.7117041349411011, "learning_rate": 2.0214e-05, "loss": 0.0153, "step": 6741 }, { "epoch": 12.439519852262235, "grad_norm": 
0.4517172574996948, "learning_rate": 2.0217000000000003e-05, "loss": 0.0233, "step": 6742 }, { "epoch": 12.441366574330564, "grad_norm": 0.6834757924079895, "learning_rate": 2.0220000000000003e-05, "loss": 0.0289, "step": 6743 }, { "epoch": 12.443213296398891, "grad_norm": 0.5634806156158447, "learning_rate": 2.0223000000000003e-05, "loss": 0.0321, "step": 6744 }, { "epoch": 12.44506001846722, "grad_norm": 0.42304977774620056, "learning_rate": 2.0226000000000003e-05, "loss": 0.0151, "step": 6745 }, { "epoch": 12.44690674053555, "grad_norm": 0.3649885654449463, "learning_rate": 2.0229000000000002e-05, "loss": 0.0168, "step": 6746 }, { "epoch": 12.448753462603879, "grad_norm": 0.44206562638282776, "learning_rate": 2.0232000000000002e-05, "loss": 0.0143, "step": 6747 }, { "epoch": 12.450600184672206, "grad_norm": 0.7201863527297974, "learning_rate": 2.0235e-05, "loss": 0.032, "step": 6748 }, { "epoch": 12.452446906740535, "grad_norm": 0.5601069927215576, "learning_rate": 2.0238e-05, "loss": 0.0189, "step": 6749 }, { "epoch": 12.454293628808864, "grad_norm": 0.4897482395172119, "learning_rate": 2.0240999999999998e-05, "loss": 0.0178, "step": 6750 }, { "epoch": 12.456140350877194, "grad_norm": 0.6514797210693359, "learning_rate": 2.0243999999999998e-05, "loss": 0.026, "step": 6751 }, { "epoch": 12.45798707294552, "grad_norm": 0.6403403282165527, "learning_rate": 2.0247e-05, "loss": 0.0298, "step": 6752 }, { "epoch": 12.45983379501385, "grad_norm": 0.4038439691066742, "learning_rate": 2.025e-05, "loss": 0.0197, "step": 6753 }, { "epoch": 12.46168051708218, "grad_norm": 0.5625746846199036, "learning_rate": 2.0253e-05, "loss": 0.0279, "step": 6754 }, { "epoch": 12.463527239150508, "grad_norm": 1.6312752962112427, "learning_rate": 2.0256e-05, "loss": 0.3294, "step": 6755 }, { "epoch": 12.465373961218837, "grad_norm": 0.7647871375083923, "learning_rate": 2.0259e-05, "loss": 0.1652, "step": 6756 }, { "epoch": 12.467220683287165, "grad_norm": 0.8227677941322327, 
"learning_rate": 2.0262e-05, "loss": 0.1735, "step": 6757 }, { "epoch": 12.469067405355494, "grad_norm": 0.6325469017028809, "learning_rate": 2.0265e-05, "loss": 0.1444, "step": 6758 }, { "epoch": 12.470914127423823, "grad_norm": 0.7547577619552612, "learning_rate": 2.0268e-05, "loss": 0.1389, "step": 6759 }, { "epoch": 12.472760849492152, "grad_norm": 0.5040287971496582, "learning_rate": 2.0271e-05, "loss": 0.084, "step": 6760 }, { "epoch": 12.47460757156048, "grad_norm": 0.5050820708274841, "learning_rate": 2.0274e-05, "loss": 0.1005, "step": 6761 }, { "epoch": 12.476454293628809, "grad_norm": 0.8370978832244873, "learning_rate": 2.0277e-05, "loss": 0.101, "step": 6762 }, { "epoch": 12.478301015697138, "grad_norm": 0.5290765762329102, "learning_rate": 2.0280000000000002e-05, "loss": 0.0826, "step": 6763 }, { "epoch": 12.480147737765467, "grad_norm": 0.529080331325531, "learning_rate": 2.0283000000000002e-05, "loss": 0.0676, "step": 6764 }, { "epoch": 12.481994459833794, "grad_norm": 0.6170281171798706, "learning_rate": 2.0286000000000002e-05, "loss": 0.1273, "step": 6765 }, { "epoch": 12.483841181902124, "grad_norm": 0.574787974357605, "learning_rate": 2.0289000000000002e-05, "loss": 0.0825, "step": 6766 }, { "epoch": 12.485687903970453, "grad_norm": 0.6429061889648438, "learning_rate": 2.0292e-05, "loss": 0.0628, "step": 6767 }, { "epoch": 12.487534626038782, "grad_norm": 0.4455440938472748, "learning_rate": 2.0295e-05, "loss": 0.0438, "step": 6768 }, { "epoch": 12.48938134810711, "grad_norm": 0.45541685819625854, "learning_rate": 2.0298e-05, "loss": 0.0672, "step": 6769 }, { "epoch": 12.491228070175438, "grad_norm": 0.4998338222503662, "learning_rate": 2.0301e-05, "loss": 0.0338, "step": 6770 }, { "epoch": 12.493074792243767, "grad_norm": 0.4116666913032532, "learning_rate": 2.0304e-05, "loss": 0.0408, "step": 6771 }, { "epoch": 12.494921514312097, "grad_norm": 0.3885330557823181, "learning_rate": 2.0307e-05, "loss": 0.0224, "step": 6772 }, { "epoch": 
12.496768236380424, "grad_norm": 0.3494364619255066, "learning_rate": 2.031e-05, "loss": 0.0545, "step": 6773 }, { "epoch": 12.498614958448753, "grad_norm": 0.2879297733306885, "learning_rate": 2.0313e-05, "loss": 0.0503, "step": 6774 }, { "epoch": 12.500461680517082, "grad_norm": 0.3762493133544922, "learning_rate": 2.0316e-05, "loss": 0.0205, "step": 6775 }, { "epoch": 12.502308402585411, "grad_norm": 0.25101399421691895, "learning_rate": 2.0319e-05, "loss": 0.0166, "step": 6776 }, { "epoch": 12.504155124653739, "grad_norm": 0.49378249049186707, "learning_rate": 2.0322e-05, "loss": 0.0296, "step": 6777 }, { "epoch": 12.506001846722068, "grad_norm": 0.6454524993896484, "learning_rate": 2.0325e-05, "loss": 0.0301, "step": 6778 }, { "epoch": 12.507848568790397, "grad_norm": 0.372597336769104, "learning_rate": 2.0328e-05, "loss": 0.023, "step": 6779 }, { "epoch": 12.509695290858726, "grad_norm": 0.47888875007629395, "learning_rate": 2.0331e-05, "loss": 0.0267, "step": 6780 }, { "epoch": 12.511542012927055, "grad_norm": 0.2966492772102356, "learning_rate": 2.0334e-05, "loss": 0.0163, "step": 6781 }, { "epoch": 12.513388734995383, "grad_norm": 0.7563128471374512, "learning_rate": 2.0337e-05, "loss": 0.0213, "step": 6782 }, { "epoch": 12.515235457063712, "grad_norm": 0.3697977662086487, "learning_rate": 2.0340000000000002e-05, "loss": 0.0184, "step": 6783 }, { "epoch": 12.517082179132041, "grad_norm": 0.5888333916664124, "learning_rate": 2.0343e-05, "loss": 0.0147, "step": 6784 }, { "epoch": 12.51892890120037, "grad_norm": 0.4508650004863739, "learning_rate": 2.0346e-05, "loss": 0.0167, "step": 6785 }, { "epoch": 12.520775623268698, "grad_norm": 0.3996213972568512, "learning_rate": 2.0349e-05, "loss": 0.0117, "step": 6786 }, { "epoch": 12.522622345337027, "grad_norm": 0.3682596981525421, "learning_rate": 2.0352e-05, "loss": 0.0102, "step": 6787 }, { "epoch": 12.524469067405356, "grad_norm": 0.3281971216201782, "learning_rate": 2.0355e-05, "loss": 0.0099, "step": 6788 }, 
{ "epoch": 12.526315789473685, "grad_norm": 0.6341251730918884, "learning_rate": 2.0358e-05, "loss": 0.0175, "step": 6789 }, { "epoch": 12.528162511542012, "grad_norm": 0.8982787728309631, "learning_rate": 2.0361e-05, "loss": 0.0379, "step": 6790 }, { "epoch": 12.530009233610341, "grad_norm": 0.629838228225708, "learning_rate": 2.0364e-05, "loss": 0.0259, "step": 6791 }, { "epoch": 12.53185595567867, "grad_norm": 0.4660007655620575, "learning_rate": 2.0367e-05, "loss": 0.0206, "step": 6792 }, { "epoch": 12.533702677747, "grad_norm": 0.7335576415061951, "learning_rate": 2.0370000000000003e-05, "loss": 0.0227, "step": 6793 }, { "epoch": 12.535549399815327, "grad_norm": 0.5546920299530029, "learning_rate": 2.0373000000000003e-05, "loss": 0.0224, "step": 6794 }, { "epoch": 12.537396121883656, "grad_norm": 0.5457835793495178, "learning_rate": 2.0376000000000003e-05, "loss": 0.0274, "step": 6795 }, { "epoch": 12.539242843951985, "grad_norm": 1.0218298435211182, "learning_rate": 2.0379000000000003e-05, "loss": 0.0188, "step": 6796 }, { "epoch": 12.541089566020315, "grad_norm": 0.6698922514915466, "learning_rate": 2.0382e-05, "loss": 0.0175, "step": 6797 }, { "epoch": 12.542936288088642, "grad_norm": 0.38196083903312683, "learning_rate": 2.0385e-05, "loss": 0.0291, "step": 6798 }, { "epoch": 12.544783010156971, "grad_norm": 0.3500259816646576, "learning_rate": 2.0388e-05, "loss": 0.0162, "step": 6799 }, { "epoch": 12.5466297322253, "grad_norm": 0.7685902714729309, "learning_rate": 2.0391e-05, "loss": 0.0223, "step": 6800 }, { "epoch": 12.54847645429363, "grad_norm": 0.901985764503479, "learning_rate": 2.0393999999999998e-05, "loss": 0.0397, "step": 6801 }, { "epoch": 12.550323176361957, "grad_norm": 0.3865233063697815, "learning_rate": 2.0396999999999998e-05, "loss": 0.0209, "step": 6802 }, { "epoch": 12.552169898430286, "grad_norm": 0.5133811831474304, "learning_rate": 2.04e-05, "loss": 0.0193, "step": 6803 }, { "epoch": 12.554016620498615, "grad_norm": 
0.9574827551841736, "learning_rate": 2.0403e-05, "loss": 0.0598, "step": 6804 }, { "epoch": 12.555863342566944, "grad_norm": 1.2635304927825928, "learning_rate": 2.0406e-05, "loss": 0.2437, "step": 6805 }, { "epoch": 12.557710064635273, "grad_norm": 0.9194162487983704, "learning_rate": 2.0409e-05, "loss": 0.235, "step": 6806 }, { "epoch": 12.5595567867036, "grad_norm": 0.9686319231987, "learning_rate": 2.0412e-05, "loss": 0.149, "step": 6807 }, { "epoch": 12.56140350877193, "grad_norm": 0.5710583925247192, "learning_rate": 2.0415e-05, "loss": 0.1096, "step": 6808 }, { "epoch": 12.563250230840259, "grad_norm": 0.924613893032074, "learning_rate": 2.0418e-05, "loss": 0.1968, "step": 6809 }, { "epoch": 12.565096952908588, "grad_norm": 0.6345779299736023, "learning_rate": 2.0421e-05, "loss": 0.1546, "step": 6810 }, { "epoch": 12.566943674976915, "grad_norm": 0.5243431329727173, "learning_rate": 2.0424e-05, "loss": 0.0736, "step": 6811 }, { "epoch": 12.568790397045245, "grad_norm": 1.152990698814392, "learning_rate": 2.0427e-05, "loss": 0.1234, "step": 6812 }, { "epoch": 12.570637119113574, "grad_norm": 0.544137179851532, "learning_rate": 2.0430000000000003e-05, "loss": 0.0915, "step": 6813 }, { "epoch": 12.572483841181903, "grad_norm": 0.5432739853858948, "learning_rate": 2.0433000000000002e-05, "loss": 0.0802, "step": 6814 }, { "epoch": 12.57433056325023, "grad_norm": 0.4892103970050812, "learning_rate": 2.0436000000000002e-05, "loss": 0.1198, "step": 6815 }, { "epoch": 12.57617728531856, "grad_norm": 0.639560878276825, "learning_rate": 2.0439000000000002e-05, "loss": 0.0567, "step": 6816 }, { "epoch": 12.578024007386889, "grad_norm": 0.40163639187812805, "learning_rate": 2.0442000000000002e-05, "loss": 0.0621, "step": 6817 }, { "epoch": 12.579870729455218, "grad_norm": 0.42498865723609924, "learning_rate": 2.0445e-05, "loss": 0.0504, "step": 6818 }, { "epoch": 12.581717451523545, "grad_norm": 0.5056062936782837, "learning_rate": 2.0448e-05, "loss": 0.0545, "step": 
6819 }, { "epoch": 12.583564173591874, "grad_norm": 0.46628761291503906, "learning_rate": 2.0451e-05, "loss": 0.0268, "step": 6820 }, { "epoch": 12.585410895660203, "grad_norm": 0.3696845769882202, "learning_rate": 2.0454e-05, "loss": 0.0186, "step": 6821 }, { "epoch": 12.587257617728532, "grad_norm": 0.3414132297039032, "learning_rate": 2.0456999999999997e-05, "loss": 0.0394, "step": 6822 }, { "epoch": 12.58910433979686, "grad_norm": 0.35303443670272827, "learning_rate": 2.046e-05, "loss": 0.0295, "step": 6823 }, { "epoch": 12.590951061865189, "grad_norm": 0.5817875266075134, "learning_rate": 2.0463e-05, "loss": 0.0226, "step": 6824 }, { "epoch": 12.592797783933518, "grad_norm": 0.33950066566467285, "learning_rate": 2.0466e-05, "loss": 0.0222, "step": 6825 }, { "epoch": 12.594644506001847, "grad_norm": 0.43999528884887695, "learning_rate": 2.0469e-05, "loss": 0.0229, "step": 6826 }, { "epoch": 12.596491228070175, "grad_norm": 0.3061814308166504, "learning_rate": 2.0472e-05, "loss": 0.0181, "step": 6827 }, { "epoch": 12.598337950138504, "grad_norm": 0.351764053106308, "learning_rate": 2.0475e-05, "loss": 0.0159, "step": 6828 }, { "epoch": 12.600184672206833, "grad_norm": 0.2815607488155365, "learning_rate": 2.0478e-05, "loss": 0.0184, "step": 6829 }, { "epoch": 12.602031394275162, "grad_norm": 0.5808914303779602, "learning_rate": 2.0481e-05, "loss": 0.036, "step": 6830 }, { "epoch": 12.603878116343491, "grad_norm": 0.41375821828842163, "learning_rate": 2.0484e-05, "loss": 0.0205, "step": 6831 }, { "epoch": 12.605724838411819, "grad_norm": 0.6034269332885742, "learning_rate": 2.0487e-05, "loss": 0.0296, "step": 6832 }, { "epoch": 12.607571560480148, "grad_norm": 0.5789977312088013, "learning_rate": 2.0490000000000002e-05, "loss": 0.0231, "step": 6833 }, { "epoch": 12.609418282548477, "grad_norm": 0.4856737554073334, "learning_rate": 2.0493000000000002e-05, "loss": 0.0156, "step": 6834 }, { "epoch": 12.611265004616806, "grad_norm": 0.5737854242324829, 
"learning_rate": 2.0496e-05, "loss": 0.0147, "step": 6835 }, { "epoch": 12.613111726685133, "grad_norm": 0.44736528396606445, "learning_rate": 2.0499e-05, "loss": 0.013, "step": 6836 }, { "epoch": 12.614958448753463, "grad_norm": 0.6260068416595459, "learning_rate": 2.0502e-05, "loss": 0.0262, "step": 6837 }, { "epoch": 12.616805170821792, "grad_norm": 0.31467726826667786, "learning_rate": 2.0505e-05, "loss": 0.0112, "step": 6838 }, { "epoch": 12.61865189289012, "grad_norm": 0.3031906485557556, "learning_rate": 2.0508e-05, "loss": 0.0175, "step": 6839 }, { "epoch": 12.620498614958448, "grad_norm": 0.5624115467071533, "learning_rate": 2.0511e-05, "loss": 0.0116, "step": 6840 }, { "epoch": 12.622345337026777, "grad_norm": 0.6537050008773804, "learning_rate": 2.0514e-05, "loss": 0.015, "step": 6841 }, { "epoch": 12.624192059095106, "grad_norm": 0.8279240727424622, "learning_rate": 2.0517e-05, "loss": 0.0222, "step": 6842 }, { "epoch": 12.626038781163436, "grad_norm": 0.6681758761405945, "learning_rate": 2.0520000000000003e-05, "loss": 0.0225, "step": 6843 }, { "epoch": 12.627885503231763, "grad_norm": 0.6742988228797913, "learning_rate": 2.0523000000000003e-05, "loss": 0.0287, "step": 6844 }, { "epoch": 12.629732225300092, "grad_norm": 0.3357849717140198, "learning_rate": 2.0526000000000003e-05, "loss": 0.0173, "step": 6845 }, { "epoch": 12.631578947368421, "grad_norm": 0.5499638915061951, "learning_rate": 2.0529e-05, "loss": 0.0165, "step": 6846 }, { "epoch": 12.63342566943675, "grad_norm": 0.4188133776187897, "learning_rate": 2.0532e-05, "loss": 0.0148, "step": 6847 }, { "epoch": 12.635272391505078, "grad_norm": 0.7594406008720398, "learning_rate": 2.0535e-05, "loss": 0.0307, "step": 6848 }, { "epoch": 12.637119113573407, "grad_norm": 0.7319937348365784, "learning_rate": 2.0538e-05, "loss": 0.0342, "step": 6849 }, { "epoch": 12.638965835641736, "grad_norm": 0.6182621717453003, "learning_rate": 2.0541e-05, "loss": 0.0176, "step": 6850 }, { "epoch": 
12.640812557710065, "grad_norm": 0.4317701756954193, "learning_rate": 2.0544e-05, "loss": 0.0197, "step": 6851 }, { "epoch": 12.642659279778393, "grad_norm": 0.9269976615905762, "learning_rate": 2.0546999999999998e-05, "loss": 0.0304, "step": 6852 }, { "epoch": 12.644506001846722, "grad_norm": 0.6142908930778503, "learning_rate": 2.055e-05, "loss": 0.0223, "step": 6853 }, { "epoch": 12.64635272391505, "grad_norm": 1.2634549140930176, "learning_rate": 2.0553e-05, "loss": 0.0243, "step": 6854 }, { "epoch": 12.64819944598338, "grad_norm": 0.7594727873802185, "learning_rate": 2.0556e-05, "loss": 0.2272, "step": 6855 }, { "epoch": 12.65004616805171, "grad_norm": 0.7709804177284241, "learning_rate": 2.0559e-05, "loss": 0.2132, "step": 6856 }, { "epoch": 12.651892890120036, "grad_norm": 0.6140466928482056, "learning_rate": 2.0562e-05, "loss": 0.1399, "step": 6857 }, { "epoch": 12.653739612188366, "grad_norm": 0.6246731877326965, "learning_rate": 2.0565e-05, "loss": 0.1812, "step": 6858 }, { "epoch": 12.655586334256695, "grad_norm": 1.0530415773391724, "learning_rate": 2.0568e-05, "loss": 0.1882, "step": 6859 }, { "epoch": 12.657433056325024, "grad_norm": 0.48276087641716003, "learning_rate": 2.0571e-05, "loss": 0.0938, "step": 6860 }, { "epoch": 12.659279778393351, "grad_norm": 0.5793479681015015, "learning_rate": 2.0574e-05, "loss": 0.1123, "step": 6861 }, { "epoch": 12.66112650046168, "grad_norm": 0.8910770416259766, "learning_rate": 2.0577e-05, "loss": 0.1138, "step": 6862 }, { "epoch": 12.66297322253001, "grad_norm": 0.6055312752723694, "learning_rate": 2.0580000000000003e-05, "loss": 0.0824, "step": 6863 }, { "epoch": 12.664819944598339, "grad_norm": 0.6477535963058472, "learning_rate": 2.0583000000000003e-05, "loss": 0.1224, "step": 6864 }, { "epoch": 12.666666666666666, "grad_norm": 0.44331997632980347, "learning_rate": 2.0586000000000002e-05, "loss": 0.0543, "step": 6865 }, { "epoch": 12.668513388734995, "grad_norm": 0.6174275875091553, "learning_rate": 
2.0589000000000002e-05, "loss": 0.0554, "step": 6866 }, { "epoch": 12.670360110803324, "grad_norm": 0.5855109691619873, "learning_rate": 2.0592000000000002e-05, "loss": 0.0414, "step": 6867 }, { "epoch": 12.672206832871654, "grad_norm": 0.666130542755127, "learning_rate": 2.0595000000000002e-05, "loss": 0.0647, "step": 6868 }, { "epoch": 12.67405355493998, "grad_norm": 0.5880280137062073, "learning_rate": 2.0598e-05, "loss": 0.0468, "step": 6869 }, { "epoch": 12.67590027700831, "grad_norm": 0.3871926963329315, "learning_rate": 2.0601e-05, "loss": 0.0756, "step": 6870 }, { "epoch": 12.67774699907664, "grad_norm": 0.45748671889305115, "learning_rate": 2.0603999999999998e-05, "loss": 0.0349, "step": 6871 }, { "epoch": 12.679593721144968, "grad_norm": 0.37007564306259155, "learning_rate": 2.0606999999999998e-05, "loss": 0.0257, "step": 6872 }, { "epoch": 12.681440443213296, "grad_norm": 0.5523969531059265, "learning_rate": 2.061e-05, "loss": 0.0196, "step": 6873 }, { "epoch": 12.683287165281625, "grad_norm": 0.5408273339271545, "learning_rate": 2.0613e-05, "loss": 0.0339, "step": 6874 }, { "epoch": 12.685133887349954, "grad_norm": 0.42680907249450684, "learning_rate": 2.0616e-05, "loss": 0.0147, "step": 6875 }, { "epoch": 12.686980609418283, "grad_norm": 0.4050646424293518, "learning_rate": 2.0619e-05, "loss": 0.0205, "step": 6876 }, { "epoch": 12.68882733148661, "grad_norm": 0.3192436695098877, "learning_rate": 2.0622e-05, "loss": 0.0152, "step": 6877 }, { "epoch": 12.69067405355494, "grad_norm": 0.49936342239379883, "learning_rate": 2.0625e-05, "loss": 0.0176, "step": 6878 }, { "epoch": 12.692520775623269, "grad_norm": 0.3989526331424713, "learning_rate": 2.0628e-05, "loss": 0.0192, "step": 6879 }, { "epoch": 12.694367497691598, "grad_norm": 0.46741893887519836, "learning_rate": 2.0631e-05, "loss": 0.0197, "step": 6880 }, { "epoch": 12.696214219759927, "grad_norm": 0.3675607740879059, "learning_rate": 2.0634e-05, "loss": 0.0128, "step": 6881 }, { "epoch": 
12.698060941828254, "grad_norm": 0.48503541946411133, "learning_rate": 2.0637e-05, "loss": 0.0245, "step": 6882 }, { "epoch": 12.699907663896584, "grad_norm": 0.6410048007965088, "learning_rate": 2.064e-05, "loss": 0.0165, "step": 6883 }, { "epoch": 12.701754385964913, "grad_norm": 0.6283408999443054, "learning_rate": 2.0643000000000002e-05, "loss": 0.0239, "step": 6884 }, { "epoch": 12.703601108033242, "grad_norm": 0.49065521359443665, "learning_rate": 2.0646000000000002e-05, "loss": 0.028, "step": 6885 }, { "epoch": 12.70544783010157, "grad_norm": 0.47825199365615845, "learning_rate": 2.0649e-05, "loss": 0.0219, "step": 6886 }, { "epoch": 12.707294552169898, "grad_norm": 2.441694736480713, "learning_rate": 2.0652e-05, "loss": 0.0195, "step": 6887 }, { "epoch": 12.709141274238227, "grad_norm": 0.7367351651191711, "learning_rate": 2.0655e-05, "loss": 0.0298, "step": 6888 }, { "epoch": 12.710987996306557, "grad_norm": 0.46378958225250244, "learning_rate": 2.0658e-05, "loss": 0.018, "step": 6889 }, { "epoch": 12.712834718374884, "grad_norm": 0.43975552916526794, "learning_rate": 2.0661e-05, "loss": 0.02, "step": 6890 }, { "epoch": 12.714681440443213, "grad_norm": 0.5574910640716553, "learning_rate": 2.0664e-05, "loss": 0.0221, "step": 6891 }, { "epoch": 12.716528162511542, "grad_norm": 0.459857314825058, "learning_rate": 2.0667e-05, "loss": 0.0153, "step": 6892 }, { "epoch": 12.718374884579871, "grad_norm": 0.4340030550956726, "learning_rate": 2.067e-05, "loss": 0.0166, "step": 6893 }, { "epoch": 12.720221606648199, "grad_norm": 0.8469353914260864, "learning_rate": 2.0673000000000003e-05, "loss": 0.0214, "step": 6894 }, { "epoch": 12.722068328716528, "grad_norm": 0.48159655928611755, "learning_rate": 2.0676e-05, "loss": 0.022, "step": 6895 }, { "epoch": 12.723915050784857, "grad_norm": 1.0634124279022217, "learning_rate": 2.0679e-05, "loss": 0.0298, "step": 6896 }, { "epoch": 12.725761772853186, "grad_norm": 0.3850836157798767, "learning_rate": 2.0682e-05, "loss": 
0.0136, "step": 6897 }, { "epoch": 12.727608494921514, "grad_norm": 0.5385677814483643, "learning_rate": 2.0685e-05, "loss": 0.0265, "step": 6898 }, { "epoch": 12.729455216989843, "grad_norm": 0.7085365653038025, "learning_rate": 2.0688e-05, "loss": 0.0128, "step": 6899 }, { "epoch": 12.731301939058172, "grad_norm": 0.5235032439231873, "learning_rate": 2.0691e-05, "loss": 0.0233, "step": 6900 }, { "epoch": 12.733148661126501, "grad_norm": 0.6124913096427917, "learning_rate": 2.0694e-05, "loss": 0.0154, "step": 6901 }, { "epoch": 12.734995383194828, "grad_norm": 0.4355025291442871, "learning_rate": 2.0697e-05, "loss": 0.0184, "step": 6902 }, { "epoch": 12.736842105263158, "grad_norm": 0.6916985511779785, "learning_rate": 2.07e-05, "loss": 0.0301, "step": 6903 }, { "epoch": 12.738688827331487, "grad_norm": 0.7459660768508911, "learning_rate": 2.0703e-05, "loss": 0.0351, "step": 6904 }, { "epoch": 12.740535549399816, "grad_norm": 0.7205683588981628, "learning_rate": 2.0706e-05, "loss": 0.206, "step": 6905 }, { "epoch": 12.742382271468145, "grad_norm": 1.1066662073135376, "learning_rate": 2.0709e-05, "loss": 0.2021, "step": 6906 }, { "epoch": 12.744228993536472, "grad_norm": 0.7279828190803528, "learning_rate": 2.0712e-05, "loss": 0.2561, "step": 6907 }, { "epoch": 12.746075715604801, "grad_norm": 0.4925101101398468, "learning_rate": 2.0715e-05, "loss": 0.121, "step": 6908 }, { "epoch": 12.74792243767313, "grad_norm": 0.7707668542861938, "learning_rate": 2.0718e-05, "loss": 0.1928, "step": 6909 }, { "epoch": 12.749769159741458, "grad_norm": 0.7203795313835144, "learning_rate": 2.0721e-05, "loss": 0.1554, "step": 6910 }, { "epoch": 12.751615881809787, "grad_norm": 0.5161910653114319, "learning_rate": 2.0724e-05, "loss": 0.1218, "step": 6911 }, { "epoch": 12.753462603878116, "grad_norm": 0.6372627019882202, "learning_rate": 2.0727e-05, "loss": 0.0966, "step": 6912 }, { "epoch": 12.755309325946445, "grad_norm": 0.4801217019557953, "learning_rate": 2.073e-05, "loss": 
0.0698, "step": 6913 }, { "epoch": 12.757156048014775, "grad_norm": 0.4829772710800171, "learning_rate": 2.0733000000000003e-05, "loss": 0.0823, "step": 6914 }, { "epoch": 12.759002770083102, "grad_norm": 0.41738224029541016, "learning_rate": 2.0736000000000003e-05, "loss": 0.0433, "step": 6915 }, { "epoch": 12.760849492151431, "grad_norm": 0.7026256322860718, "learning_rate": 2.0739000000000003e-05, "loss": 0.0618, "step": 6916 }, { "epoch": 12.76269621421976, "grad_norm": 0.8881515860557556, "learning_rate": 2.0742000000000002e-05, "loss": 0.1348, "step": 6917 }, { "epoch": 12.76454293628809, "grad_norm": 0.6656627655029297, "learning_rate": 2.0745000000000002e-05, "loss": 0.0435, "step": 6918 }, { "epoch": 12.766389658356417, "grad_norm": 0.6259805560112, "learning_rate": 2.0748000000000002e-05, "loss": 0.0728, "step": 6919 }, { "epoch": 12.768236380424746, "grad_norm": 0.7157806754112244, "learning_rate": 2.0751e-05, "loss": 0.0545, "step": 6920 }, { "epoch": 12.770083102493075, "grad_norm": 0.39127853512763977, "learning_rate": 2.0753999999999998e-05, "loss": 0.0329, "step": 6921 }, { "epoch": 12.771929824561404, "grad_norm": 1.0186903476715088, "learning_rate": 2.0756999999999998e-05, "loss": 0.0205, "step": 6922 }, { "epoch": 12.773776546629731, "grad_norm": 0.7008572816848755, "learning_rate": 2.0759999999999998e-05, "loss": 0.0323, "step": 6923 }, { "epoch": 12.77562326869806, "grad_norm": 0.5930014848709106, "learning_rate": 2.0763e-05, "loss": 0.0316, "step": 6924 }, { "epoch": 12.77746999076639, "grad_norm": 0.3785812258720398, "learning_rate": 2.0766e-05, "loss": 0.0181, "step": 6925 }, { "epoch": 12.779316712834719, "grad_norm": 0.3636646568775177, "learning_rate": 2.0769e-05, "loss": 0.0156, "step": 6926 }, { "epoch": 12.781163434903046, "grad_norm": 0.42580145597457886, "learning_rate": 2.0772e-05, "loss": 0.0301, "step": 6927 }, { "epoch": 12.783010156971375, "grad_norm": 0.23977164924144745, "learning_rate": 2.0775e-05, "loss": 0.0156, "step": 
6928 }, { "epoch": 12.784856879039705, "grad_norm": 0.4149671494960785, "learning_rate": 2.0778e-05, "loss": 0.021, "step": 6929 }, { "epoch": 12.786703601108034, "grad_norm": 0.6454666256904602, "learning_rate": 2.0781e-05, "loss": 0.029, "step": 6930 }, { "epoch": 12.788550323176363, "grad_norm": 1.481869101524353, "learning_rate": 2.0784e-05, "loss": 0.0257, "step": 6931 }, { "epoch": 12.79039704524469, "grad_norm": 0.35012781620025635, "learning_rate": 2.0787e-05, "loss": 0.0194, "step": 6932 }, { "epoch": 12.79224376731302, "grad_norm": 0.32981836795806885, "learning_rate": 2.079e-05, "loss": 0.0232, "step": 6933 }, { "epoch": 12.794090489381349, "grad_norm": 0.683250904083252, "learning_rate": 2.0793000000000002e-05, "loss": 0.0243, "step": 6934 }, { "epoch": 12.795937211449676, "grad_norm": 0.5012224912643433, "learning_rate": 2.0796000000000002e-05, "loss": 0.0142, "step": 6935 }, { "epoch": 12.797783933518005, "grad_norm": 1.3063740730285645, "learning_rate": 2.0799000000000002e-05, "loss": 0.0624, "step": 6936 }, { "epoch": 12.799630655586334, "grad_norm": 0.4880787134170532, "learning_rate": 2.0802000000000002e-05, "loss": 0.027, "step": 6937 }, { "epoch": 12.801477377654663, "grad_norm": 0.23685327172279358, "learning_rate": 2.0805e-05, "loss": 0.0076, "step": 6938 }, { "epoch": 12.803324099722992, "grad_norm": 0.5517817139625549, "learning_rate": 2.0808e-05, "loss": 0.0151, "step": 6939 }, { "epoch": 12.80517082179132, "grad_norm": 0.28913408517837524, "learning_rate": 2.0811e-05, "loss": 0.0151, "step": 6940 }, { "epoch": 12.807017543859649, "grad_norm": 0.5635241866111755, "learning_rate": 2.0814e-05, "loss": 0.0377, "step": 6941 }, { "epoch": 12.808864265927978, "grad_norm": 0.5010838508605957, "learning_rate": 2.0817e-05, "loss": 0.0202, "step": 6942 }, { "epoch": 12.810710987996307, "grad_norm": 0.3855663537979126, "learning_rate": 2.082e-05, "loss": 0.0138, "step": 6943 }, { "epoch": 12.812557710064635, "grad_norm": 0.8619406223297119, 
"learning_rate": 2.0823e-05, "loss": 0.0257, "step": 6944 }, { "epoch": 12.814404432132964, "grad_norm": 0.30185648798942566, "learning_rate": 2.0826e-05, "loss": 0.0166, "step": 6945 }, { "epoch": 12.816251154201293, "grad_norm": 0.6880849003791809, "learning_rate": 2.0829e-05, "loss": 0.0246, "step": 6946 }, { "epoch": 12.818097876269622, "grad_norm": 0.5926246643066406, "learning_rate": 2.0832e-05, "loss": 0.0255, "step": 6947 }, { "epoch": 12.81994459833795, "grad_norm": 0.4034042954444885, "learning_rate": 2.0835e-05, "loss": 0.0137, "step": 6948 }, { "epoch": 12.821791320406279, "grad_norm": 0.7444289326667786, "learning_rate": 2.0838e-05, "loss": 0.0219, "step": 6949 }, { "epoch": 12.823638042474608, "grad_norm": 0.2735901474952698, "learning_rate": 2.0841e-05, "loss": 0.0143, "step": 6950 }, { "epoch": 12.825484764542937, "grad_norm": 0.4763954281806946, "learning_rate": 2.0844e-05, "loss": 0.0129, "step": 6951 }, { "epoch": 12.827331486611264, "grad_norm": 0.7796971797943115, "learning_rate": 2.0847e-05, "loss": 0.0207, "step": 6952 }, { "epoch": 12.829178208679593, "grad_norm": 0.6892587542533875, "learning_rate": 2.085e-05, "loss": 0.034, "step": 6953 }, { "epoch": 12.831024930747922, "grad_norm": 2.045210361480713, "learning_rate": 2.0853000000000002e-05, "loss": 0.0358, "step": 6954 }, { "epoch": 12.832871652816252, "grad_norm": 0.6591195464134216, "learning_rate": 2.0856e-05, "loss": 0.2097, "step": 6955 }, { "epoch": 12.83471837488458, "grad_norm": 0.7082017660140991, "learning_rate": 2.0859e-05, "loss": 0.2276, "step": 6956 }, { "epoch": 12.836565096952908, "grad_norm": 0.5350101590156555, "learning_rate": 2.0862e-05, "loss": 0.1638, "step": 6957 }, { "epoch": 12.838411819021237, "grad_norm": 0.6802604794502258, "learning_rate": 2.0865e-05, "loss": 0.2066, "step": 6958 }, { "epoch": 12.840258541089566, "grad_norm": 0.9725978970527649, "learning_rate": 2.0868e-05, "loss": 0.1875, "step": 6959 }, { "epoch": 12.842105263157894, "grad_norm": 
0.6738961338996887, "learning_rate": 2.0871e-05, "loss": 0.1481, "step": 6960 }, { "epoch": 12.843951985226223, "grad_norm": 0.5680983662605286, "learning_rate": 2.0874e-05, "loss": 0.1173, "step": 6961 }, { "epoch": 12.845798707294552, "grad_norm": 0.45616841316223145, "learning_rate": 2.0877e-05, "loss": 0.0983, "step": 6962 }, { "epoch": 12.847645429362881, "grad_norm": 0.598066508769989, "learning_rate": 2.088e-05, "loss": 0.1052, "step": 6963 }, { "epoch": 12.84949215143121, "grad_norm": 1.4628897905349731, "learning_rate": 2.0883000000000003e-05, "loss": 0.1242, "step": 6964 }, { "epoch": 12.851338873499538, "grad_norm": 0.8236523866653442, "learning_rate": 2.0886000000000003e-05, "loss": 0.0854, "step": 6965 }, { "epoch": 12.853185595567867, "grad_norm": 0.4810824990272522, "learning_rate": 2.0889000000000003e-05, "loss": 0.0581, "step": 6966 }, { "epoch": 12.855032317636196, "grad_norm": 0.3711375296115875, "learning_rate": 2.0892000000000003e-05, "loss": 0.0426, "step": 6967 }, { "epoch": 12.856879039704525, "grad_norm": 0.347176730632782, "learning_rate": 2.0895000000000002e-05, "loss": 0.051, "step": 6968 }, { "epoch": 12.858725761772853, "grad_norm": 0.33575108647346497, "learning_rate": 2.0898e-05, "loss": 0.0452, "step": 6969 }, { "epoch": 12.860572483841182, "grad_norm": 0.35422179102897644, "learning_rate": 2.0901e-05, "loss": 0.0251, "step": 6970 }, { "epoch": 12.86241920590951, "grad_norm": 0.23786962032318115, "learning_rate": 2.0904e-05, "loss": 0.0274, "step": 6971 }, { "epoch": 12.86426592797784, "grad_norm": 0.4184572696685791, "learning_rate": 2.0906999999999998e-05, "loss": 0.0333, "step": 6972 }, { "epoch": 12.866112650046167, "grad_norm": 0.3609471023082733, "learning_rate": 2.0909999999999998e-05, "loss": 0.0286, "step": 6973 }, { "epoch": 12.867959372114496, "grad_norm": 0.7237544655799866, "learning_rate": 2.0913e-05, "loss": 0.0644, "step": 6974 }, { "epoch": 12.869806094182826, "grad_norm": 0.49987494945526123, "learning_rate": 
2.0916e-05, "loss": 0.0315, "step": 6975 }, { "epoch": 12.871652816251155, "grad_norm": 0.291019082069397, "learning_rate": 2.0919e-05, "loss": 0.0161, "step": 6976 }, { "epoch": 12.873499538319482, "grad_norm": 0.3615555465221405, "learning_rate": 2.0922e-05, "loss": 0.0203, "step": 6977 }, { "epoch": 12.875346260387811, "grad_norm": 0.2607676088809967, "learning_rate": 2.0925e-05, "loss": 0.0171, "step": 6978 }, { "epoch": 12.87719298245614, "grad_norm": 0.30133187770843506, "learning_rate": 2.0928e-05, "loss": 0.0241, "step": 6979 }, { "epoch": 12.87903970452447, "grad_norm": 0.30178168416023254, "learning_rate": 2.0931e-05, "loss": 0.0131, "step": 6980 }, { "epoch": 12.880886426592799, "grad_norm": 0.31973060965538025, "learning_rate": 2.0934e-05, "loss": 0.0171, "step": 6981 }, { "epoch": 12.882733148661126, "grad_norm": 0.4676547348499298, "learning_rate": 2.0937e-05, "loss": 0.0241, "step": 6982 }, { "epoch": 12.884579870729455, "grad_norm": 0.49283716082572937, "learning_rate": 2.094e-05, "loss": 0.0262, "step": 6983 }, { "epoch": 12.886426592797784, "grad_norm": 0.651775062084198, "learning_rate": 2.0943000000000003e-05, "loss": 0.0193, "step": 6984 }, { "epoch": 12.888273314866112, "grad_norm": 0.41634607315063477, "learning_rate": 2.0946000000000002e-05, "loss": 0.0168, "step": 6985 }, { "epoch": 12.89012003693444, "grad_norm": 0.4039091169834137, "learning_rate": 2.0949000000000002e-05, "loss": 0.019, "step": 6986 }, { "epoch": 12.89196675900277, "grad_norm": 0.8984862565994263, "learning_rate": 2.0952000000000002e-05, "loss": 0.0301, "step": 6987 }, { "epoch": 12.8938134810711, "grad_norm": 0.3584153950214386, "learning_rate": 2.0955000000000002e-05, "loss": 0.0124, "step": 6988 }, { "epoch": 12.895660203139428, "grad_norm": 0.27817732095718384, "learning_rate": 2.0958e-05, "loss": 0.0133, "step": 6989 }, { "epoch": 12.897506925207756, "grad_norm": 0.5320377945899963, "learning_rate": 2.0961e-05, "loss": 0.0254, "step": 6990 }, { "epoch": 
12.899353647276085, "grad_norm": 0.3643032908439636, "learning_rate": 2.0964e-05, "loss": 0.0121, "step": 6991 }, { "epoch": 12.901200369344414, "grad_norm": 0.5568037033081055, "learning_rate": 2.0967e-05, "loss": 0.035, "step": 6992 }, { "epoch": 12.903047091412743, "grad_norm": 0.30891767144203186, "learning_rate": 2.097e-05, "loss": 0.0108, "step": 6993 }, { "epoch": 12.90489381348107, "grad_norm": 0.7795283794403076, "learning_rate": 2.0973e-05, "loss": 0.0205, "step": 6994 }, { "epoch": 12.9067405355494, "grad_norm": 0.4496384561061859, "learning_rate": 2.0976e-05, "loss": 0.0161, "step": 6995 }, { "epoch": 12.908587257617729, "grad_norm": 0.37556585669517517, "learning_rate": 2.0979e-05, "loss": 0.0177, "step": 6996 }, { "epoch": 12.910433979686058, "grad_norm": 0.6202804446220398, "learning_rate": 2.0982e-05, "loss": 0.028, "step": 6997 }, { "epoch": 12.912280701754385, "grad_norm": 0.3959607183933258, "learning_rate": 2.0985e-05, "loss": 0.0165, "step": 6998 }, { "epoch": 12.914127423822714, "grad_norm": 0.8058693408966064, "learning_rate": 2.0988e-05, "loss": 0.0333, "step": 6999 }, { "epoch": 12.915974145891044, "grad_norm": 0.4133374094963074, "learning_rate": 2.0991e-05, "loss": 0.0162, "step": 7000 }, { "epoch": 12.915974145891044, "eval_cer": 0.11390206599787438, "eval_loss": 0.3320147395133972, "eval_runtime": 15.7794, "eval_samples_per_second": 19.266, "eval_steps_per_second": 0.634, "eval_wer": 0.3983115886415963, "step": 7000 }, { "epoch": 12.917820867959373, "grad_norm": 0.383863627910614, "learning_rate": 2.0994e-05, "loss": 0.0181, "step": 7001 }, { "epoch": 12.9196675900277, "grad_norm": 0.5668813586235046, "learning_rate": 2.0997e-05, "loss": 0.0153, "step": 7002 }, { "epoch": 12.92151431209603, "grad_norm": 0.4858815371990204, "learning_rate": 2.1e-05, "loss": 0.0214, "step": 7003 }, { "epoch": 12.923361034164358, "grad_norm": 0.3549613356590271, "learning_rate": 2.1003e-05, "loss": 0.0165, "step": 7004 }, { "epoch": 12.925207756232687, 
"grad_norm": 0.8246420621871948, "learning_rate": 2.1006000000000002e-05, "loss": 0.2249, "step": 7005 }, { "epoch": 12.927054478301017, "grad_norm": 0.8446950316429138, "learning_rate": 2.1009e-05, "loss": 0.224, "step": 7006 }, { "epoch": 12.928901200369344, "grad_norm": 0.9744293689727783, "learning_rate": 2.1012e-05, "loss": 0.1771, "step": 7007 }, { "epoch": 12.930747922437673, "grad_norm": 0.7374587655067444, "learning_rate": 2.1015e-05, "loss": 0.1285, "step": 7008 }, { "epoch": 12.932594644506002, "grad_norm": 0.9851799607276917, "learning_rate": 2.1018e-05, "loss": 0.1067, "step": 7009 }, { "epoch": 12.93444136657433, "grad_norm": 0.5872266888618469, "learning_rate": 2.1021e-05, "loss": 0.1105, "step": 7010 }, { "epoch": 12.936288088642659, "grad_norm": 0.7037659287452698, "learning_rate": 2.1024e-05, "loss": 0.1388, "step": 7011 }, { "epoch": 12.938134810710988, "grad_norm": 0.7483109831809998, "learning_rate": 2.1027e-05, "loss": 0.0796, "step": 7012 }, { "epoch": 12.939981532779317, "grad_norm": 0.5599822998046875, "learning_rate": 2.103e-05, "loss": 0.0798, "step": 7013 }, { "epoch": 12.941828254847646, "grad_norm": 0.5699829459190369, "learning_rate": 2.1033e-05, "loss": 0.1105, "step": 7014 }, { "epoch": 12.943674976915974, "grad_norm": 0.6335070133209229, "learning_rate": 2.1036000000000003e-05, "loss": 0.0509, "step": 7015 }, { "epoch": 12.945521698984303, "grad_norm": 0.4124175012111664, "learning_rate": 2.1039000000000003e-05, "loss": 0.0774, "step": 7016 }, { "epoch": 12.947368421052632, "grad_norm": 0.5848485827445984, "learning_rate": 2.1042000000000003e-05, "loss": 0.035, "step": 7017 }, { "epoch": 12.949215143120961, "grad_norm": 0.35119831562042236, "learning_rate": 2.1045e-05, "loss": 0.0339, "step": 7018 }, { "epoch": 12.951061865189288, "grad_norm": 0.38088950514793396, "learning_rate": 2.1048e-05, "loss": 0.0242, "step": 7019 }, { "epoch": 12.952908587257618, "grad_norm": 0.2906529903411865, "learning_rate": 2.1051e-05, "loss": 0.0179, 
"step": 7020 }, { "epoch": 12.954755309325947, "grad_norm": 0.25520816445350647, "learning_rate": 2.1054e-05, "loss": 0.0201, "step": 7021 }, { "epoch": 12.956602031394276, "grad_norm": 0.8681545257568359, "learning_rate": 2.1057e-05, "loss": 0.0407, "step": 7022 }, { "epoch": 12.958448753462603, "grad_norm": 0.6451759934425354, "learning_rate": 2.1059999999999998e-05, "loss": 0.0216, "step": 7023 }, { "epoch": 12.960295475530932, "grad_norm": 0.24663574993610382, "learning_rate": 2.1062999999999998e-05, "loss": 0.0137, "step": 7024 }, { "epoch": 12.962142197599261, "grad_norm": 0.44320282340049744, "learning_rate": 2.1066e-05, "loss": 0.0271, "step": 7025 }, { "epoch": 12.96398891966759, "grad_norm": 0.673911988735199, "learning_rate": 2.1069e-05, "loss": 0.0374, "step": 7026 }, { "epoch": 12.965835641735918, "grad_norm": 0.3014264702796936, "learning_rate": 2.1072e-05, "loss": 0.0093, "step": 7027 }, { "epoch": 12.967682363804247, "grad_norm": 0.35082635283470154, "learning_rate": 2.1075e-05, "loss": 0.0301, "step": 7028 }, { "epoch": 12.969529085872576, "grad_norm": 0.41949841380119324, "learning_rate": 2.1078e-05, "loss": 0.0239, "step": 7029 }, { "epoch": 12.971375807940905, "grad_norm": 0.42678096890449524, "learning_rate": 2.1081e-05, "loss": 0.0199, "step": 7030 }, { "epoch": 12.973222530009235, "grad_norm": 0.36272698640823364, "learning_rate": 2.1084e-05, "loss": 0.0106, "step": 7031 }, { "epoch": 12.975069252077562, "grad_norm": 0.9446842670440674, "learning_rate": 2.1087e-05, "loss": 0.052, "step": 7032 }, { "epoch": 12.976915974145891, "grad_norm": 0.46493256092071533, "learning_rate": 2.109e-05, "loss": 0.0224, "step": 7033 }, { "epoch": 12.97876269621422, "grad_norm": 0.5906376838684082, "learning_rate": 2.1093e-05, "loss": 0.0228, "step": 7034 }, { "epoch": 12.980609418282548, "grad_norm": 0.463929682970047, "learning_rate": 2.1096000000000003e-05, "loss": 0.0156, "step": 7035 }, { "epoch": 12.982456140350877, "grad_norm": 0.4736950099468231, 
"learning_rate": 2.1099000000000002e-05, "loss": 0.0205, "step": 7036 }, { "epoch": 12.984302862419206, "grad_norm": 0.48260924220085144, "learning_rate": 2.1102000000000002e-05, "loss": 0.0207, "step": 7037 }, { "epoch": 12.986149584487535, "grad_norm": 0.6692067384719849, "learning_rate": 2.1105000000000002e-05, "loss": 0.0157, "step": 7038 }, { "epoch": 12.987996306555864, "grad_norm": 0.6906869411468506, "learning_rate": 2.1108000000000002e-05, "loss": 0.0335, "step": 7039 }, { "epoch": 12.989843028624191, "grad_norm": 0.5933719873428345, "learning_rate": 2.1111e-05, "loss": 0.0265, "step": 7040 }, { "epoch": 12.99168975069252, "grad_norm": 0.36224380135536194, "learning_rate": 2.1114e-05, "loss": 0.0172, "step": 7041 }, { "epoch": 12.99353647276085, "grad_norm": 0.5262947082519531, "learning_rate": 2.1117e-05, "loss": 0.0283, "step": 7042 }, { "epoch": 12.995383194829179, "grad_norm": 0.4658443331718445, "learning_rate": 2.1119999999999998e-05, "loss": 0.0214, "step": 7043 }, { "epoch": 12.997229916897506, "grad_norm": 0.6707825064659119, "learning_rate": 2.1122999999999997e-05, "loss": 0.0173, "step": 7044 }, { "epoch": 12.999076638965835, "grad_norm": 0.6804872751235962, "learning_rate": 2.1126e-05, "loss": 0.0282, "step": 7045 }, { "epoch": 13.0, "grad_norm": 0.5780600309371948, "learning_rate": 2.1129e-05, "loss": 0.0378, "step": 7046 }, { "epoch": 13.00184672206833, "grad_norm": 1.1009926795959473, "learning_rate": 2.1132e-05, "loss": 0.2053, "step": 7047 }, { "epoch": 13.003693444136658, "grad_norm": 0.5999391078948975, "learning_rate": 2.1135e-05, "loss": 0.1755, "step": 7048 }, { "epoch": 13.005540166204986, "grad_norm": 0.556535542011261, "learning_rate": 2.1138e-05, "loss": 0.1356, "step": 7049 }, { "epoch": 13.007386888273315, "grad_norm": 0.600877583026886, "learning_rate": 2.1141e-05, "loss": 0.1368, "step": 7050 }, { "epoch": 13.009233610341644, "grad_norm": 0.7779603600502014, "learning_rate": 2.1144e-05, "loss": 0.1396, "step": 7051 }, { 
"epoch": 13.011080332409973, "grad_norm": 0.959456205368042, "learning_rate": 2.1147e-05, "loss": 0.1214, "step": 7052 }, { "epoch": 13.0129270544783, "grad_norm": 0.6287292242050171, "learning_rate": 2.115e-05, "loss": 0.1031, "step": 7053 }, { "epoch": 13.01477377654663, "grad_norm": 1.0254368782043457, "learning_rate": 2.1153e-05, "loss": 0.157, "step": 7054 }, { "epoch": 13.016620498614959, "grad_norm": 0.43327924609184265, "learning_rate": 2.1156000000000002e-05, "loss": 0.0638, "step": 7055 }, { "epoch": 13.018467220683288, "grad_norm": 0.6945347189903259, "learning_rate": 2.1159000000000002e-05, "loss": 0.1202, "step": 7056 }, { "epoch": 13.020313942751615, "grad_norm": 0.4801901578903198, "learning_rate": 2.1162e-05, "loss": 0.0553, "step": 7057 }, { "epoch": 13.022160664819944, "grad_norm": 0.6773169636726379, "learning_rate": 2.1165e-05, "loss": 0.0592, "step": 7058 }, { "epoch": 13.024007386888274, "grad_norm": 0.3857210874557495, "learning_rate": 2.1168e-05, "loss": 0.044, "step": 7059 }, { "epoch": 13.025854108956603, "grad_norm": 0.43382251262664795, "learning_rate": 2.1171e-05, "loss": 0.0577, "step": 7060 }, { "epoch": 13.02770083102493, "grad_norm": 0.5019907355308533, "learning_rate": 2.1174e-05, "loss": 0.0489, "step": 7061 }, { "epoch": 13.02954755309326, "grad_norm": 0.396818608045578, "learning_rate": 2.1177e-05, "loss": 0.045, "step": 7062 }, { "epoch": 13.031394275161588, "grad_norm": 0.3628042936325073, "learning_rate": 2.118e-05, "loss": 0.0311, "step": 7063 }, { "epoch": 13.033240997229917, "grad_norm": 0.3977918028831482, "learning_rate": 2.1183e-05, "loss": 0.0295, "step": 7064 }, { "epoch": 13.035087719298245, "grad_norm": 0.43366360664367676, "learning_rate": 2.1186000000000003e-05, "loss": 0.0235, "step": 7065 }, { "epoch": 13.036934441366574, "grad_norm": 0.20432913303375244, "learning_rate": 2.1189000000000003e-05, "loss": 0.0141, "step": 7066 }, { "epoch": 13.038781163434903, "grad_norm": 0.41155481338500977, "learning_rate": 
2.1192e-05, "loss": 0.0175, "step": 7067 }, { "epoch": 13.040627885503232, "grad_norm": 0.5188421010971069, "learning_rate": 2.1195e-05, "loss": 0.0143, "step": 7068 }, { "epoch": 13.04247460757156, "grad_norm": 0.3438571095466614, "learning_rate": 2.1198e-05, "loss": 0.0173, "step": 7069 }, { "epoch": 13.044321329639889, "grad_norm": 0.38158324360847473, "learning_rate": 2.1201e-05, "loss": 0.0154, "step": 7070 }, { "epoch": 13.046168051708218, "grad_norm": 0.4931504726409912, "learning_rate": 2.1204e-05, "loss": 0.0211, "step": 7071 }, { "epoch": 13.048014773776547, "grad_norm": 0.6768643856048584, "learning_rate": 2.1207e-05, "loss": 0.0446, "step": 7072 }, { "epoch": 13.049861495844876, "grad_norm": 0.2771964967250824, "learning_rate": 2.121e-05, "loss": 0.0146, "step": 7073 }, { "epoch": 13.051708217913204, "grad_norm": 0.30338409543037415, "learning_rate": 2.1213e-05, "loss": 0.0146, "step": 7074 }, { "epoch": 13.053554939981533, "grad_norm": 0.22734993696212769, "learning_rate": 2.1216e-05, "loss": 0.012, "step": 7075 }, { "epoch": 13.055401662049862, "grad_norm": 0.43263518810272217, "learning_rate": 2.1219e-05, "loss": 0.0182, "step": 7076 }, { "epoch": 13.057248384118191, "grad_norm": 0.44592079520225525, "learning_rate": 2.1222e-05, "loss": 0.0124, "step": 7077 }, { "epoch": 13.059095106186518, "grad_norm": 0.2170630544424057, "learning_rate": 2.1225e-05, "loss": 0.0134, "step": 7078 }, { "epoch": 13.060941828254848, "grad_norm": 0.2715395987033844, "learning_rate": 2.1228e-05, "loss": 0.0108, "step": 7079 }, { "epoch": 13.062788550323177, "grad_norm": 0.3276883661746979, "learning_rate": 2.1231e-05, "loss": 0.0117, "step": 7080 }, { "epoch": 13.064635272391506, "grad_norm": 0.3050946593284607, "learning_rate": 2.1234e-05, "loss": 0.0141, "step": 7081 }, { "epoch": 13.066481994459833, "grad_norm": 0.3024367392063141, "learning_rate": 2.1237e-05, "loss": 0.0113, "step": 7082 }, { "epoch": 13.068328716528162, "grad_norm": 0.40349775552749634, 
"learning_rate": 2.124e-05, "loss": 0.0106, "step": 7083 }, { "epoch": 13.070175438596491, "grad_norm": 0.16834889352321625, "learning_rate": 2.1243e-05, "loss": 0.0054, "step": 7084 }, { "epoch": 13.07202216066482, "grad_norm": 0.3100760579109192, "learning_rate": 2.1246000000000003e-05, "loss": 0.0127, "step": 7085 }, { "epoch": 13.073868882733148, "grad_norm": 0.5087378621101379, "learning_rate": 2.1249000000000003e-05, "loss": 0.0306, "step": 7086 }, { "epoch": 13.075715604801477, "grad_norm": 0.5137645602226257, "learning_rate": 2.1252000000000003e-05, "loss": 0.0128, "step": 7087 }, { "epoch": 13.077562326869806, "grad_norm": 0.4320560395717621, "learning_rate": 2.1255000000000002e-05, "loss": 0.0168, "step": 7088 }, { "epoch": 13.079409048938135, "grad_norm": 0.3999880254268646, "learning_rate": 2.1258000000000002e-05, "loss": 0.0185, "step": 7089 }, { "epoch": 13.081255771006463, "grad_norm": 0.31367284059524536, "learning_rate": 2.1261000000000002e-05, "loss": 0.0126, "step": 7090 }, { "epoch": 13.083102493074792, "grad_norm": 0.49617084860801697, "learning_rate": 2.1264000000000002e-05, "loss": 0.0184, "step": 7091 }, { "epoch": 13.084949215143121, "grad_norm": 0.44734519720077515, "learning_rate": 2.1266999999999998e-05, "loss": 0.0105, "step": 7092 }, { "epoch": 13.08679593721145, "grad_norm": 0.5911281704902649, "learning_rate": 2.1269999999999998e-05, "loss": 0.0214, "step": 7093 }, { "epoch": 13.088642659279778, "grad_norm": 1.7160379886627197, "learning_rate": 2.1272999999999998e-05, "loss": 0.0247, "step": 7094 }, { "epoch": 13.090489381348107, "grad_norm": 0.3841411769390106, "learning_rate": 2.1276e-05, "loss": 0.0156, "step": 7095 }, { "epoch": 13.092336103416436, "grad_norm": 0.5357542037963867, "learning_rate": 2.1279e-05, "loss": 0.0416, "step": 7096 }, { "epoch": 13.094182825484765, "grad_norm": 0.9648202061653137, "learning_rate": 2.1282e-05, "loss": 0.3002, "step": 7097 }, { "epoch": 13.096029547553094, "grad_norm": 0.6870272159576416, 
"learning_rate": 2.1285e-05, "loss": 0.2282, "step": 7098 }, { "epoch": 13.097876269621421, "grad_norm": 0.5585189461708069, "learning_rate": 2.1288e-05, "loss": 0.156, "step": 7099 }, { "epoch": 13.09972299168975, "grad_norm": 0.7169678211212158, "learning_rate": 2.1291e-05, "loss": 0.1415, "step": 7100 }, { "epoch": 13.10156971375808, "grad_norm": 0.5396972298622131, "learning_rate": 2.1294e-05, "loss": 0.1254, "step": 7101 }, { "epoch": 13.103416435826409, "grad_norm": 0.6024976372718811, "learning_rate": 2.1297e-05, "loss": 0.1497, "step": 7102 }, { "epoch": 13.105263157894736, "grad_norm": 0.5685150027275085, "learning_rate": 2.13e-05, "loss": 0.116, "step": 7103 }, { "epoch": 13.107109879963065, "grad_norm": 0.4817744195461273, "learning_rate": 2.1303e-05, "loss": 0.1146, "step": 7104 }, { "epoch": 13.108956602031395, "grad_norm": 0.6075971126556396, "learning_rate": 2.1306000000000002e-05, "loss": 0.0902, "step": 7105 }, { "epoch": 13.110803324099724, "grad_norm": 0.4989315867424011, "learning_rate": 2.1309000000000002e-05, "loss": 0.0883, "step": 7106 }, { "epoch": 13.112650046168051, "grad_norm": 0.923866331577301, "learning_rate": 2.1312000000000002e-05, "loss": 0.0684, "step": 7107 }, { "epoch": 13.11449676823638, "grad_norm": 0.3913983106613159, "learning_rate": 2.1315000000000002e-05, "loss": 0.044, "step": 7108 }, { "epoch": 13.11634349030471, "grad_norm": 0.3887367248535156, "learning_rate": 2.1318e-05, "loss": 0.0533, "step": 7109 }, { "epoch": 13.118190212373039, "grad_norm": 0.36022719740867615, "learning_rate": 2.1321e-05, "loss": 0.0187, "step": 7110 }, { "epoch": 13.120036934441366, "grad_norm": 0.38603368401527405, "learning_rate": 2.1324e-05, "loss": 0.0578, "step": 7111 }, { "epoch": 13.121883656509695, "grad_norm": 0.75755375623703, "learning_rate": 2.1327e-05, "loss": 0.0812, "step": 7112 }, { "epoch": 13.123730378578024, "grad_norm": 0.31779199838638306, "learning_rate": 2.133e-05, "loss": 0.0181, "step": 7113 }, { "epoch": 
13.125577100646353, "grad_norm": 0.6031693816184998, "learning_rate": 2.1333e-05, "loss": 0.0439, "step": 7114 }, { "epoch": 13.12742382271468, "grad_norm": 0.27690422534942627, "learning_rate": 2.1336000000000004e-05, "loss": 0.0171, "step": 7115 }, { "epoch": 13.12927054478301, "grad_norm": 0.3437330722808838, "learning_rate": 2.1339e-05, "loss": 0.013, "step": 7116 }, { "epoch": 13.131117266851339, "grad_norm": 0.5207774639129639, "learning_rate": 2.1342e-05, "loss": 0.0223, "step": 7117 }, { "epoch": 13.132963988919668, "grad_norm": 0.2619399130344391, "learning_rate": 2.1345e-05, "loss": 0.0232, "step": 7118 }, { "epoch": 13.134810710987995, "grad_norm": 0.24779574573040009, "learning_rate": 2.1348e-05, "loss": 0.0102, "step": 7119 }, { "epoch": 13.136657433056325, "grad_norm": 0.28220513463020325, "learning_rate": 2.1351e-05, "loss": 0.0174, "step": 7120 }, { "epoch": 13.138504155124654, "grad_norm": 0.3668232262134552, "learning_rate": 2.1354e-05, "loss": 0.02, "step": 7121 }, { "epoch": 13.140350877192983, "grad_norm": 0.27169662714004517, "learning_rate": 2.1357e-05, "loss": 0.0156, "step": 7122 }, { "epoch": 13.142197599261312, "grad_norm": 0.33091557025909424, "learning_rate": 2.136e-05, "loss": 0.015, "step": 7123 }, { "epoch": 13.14404432132964, "grad_norm": 0.5022887587547302, "learning_rate": 2.1363e-05, "loss": 0.0175, "step": 7124 }, { "epoch": 13.145891043397969, "grad_norm": 0.7188720107078552, "learning_rate": 2.1366000000000002e-05, "loss": 0.0187, "step": 7125 }, { "epoch": 13.147737765466298, "grad_norm": 0.5348430275917053, "learning_rate": 2.1369e-05, "loss": 0.019, "step": 7126 }, { "epoch": 13.149584487534627, "grad_norm": 0.34337300062179565, "learning_rate": 2.1372e-05, "loss": 0.0137, "step": 7127 }, { "epoch": 13.151431209602954, "grad_norm": 0.32131054997444153, "learning_rate": 2.1375e-05, "loss": 0.024, "step": 7128 }, { "epoch": 13.153277931671283, "grad_norm": 0.3087523877620697, "learning_rate": 2.1378e-05, "loss": 0.0113, 
"step": 7129 }, { "epoch": 13.155124653739612, "grad_norm": 0.23587988317012787, "learning_rate": 2.1381e-05, "loss": 0.0122, "step": 7130 }, { "epoch": 13.156971375807942, "grad_norm": 0.6149274110794067, "learning_rate": 2.1384e-05, "loss": 0.0207, "step": 7131 }, { "epoch": 13.158818097876269, "grad_norm": 0.366910457611084, "learning_rate": 2.1387e-05, "loss": 0.0134, "step": 7132 }, { "epoch": 13.160664819944598, "grad_norm": 0.4519480764865875, "learning_rate": 2.139e-05, "loss": 0.016, "step": 7133 }, { "epoch": 13.162511542012927, "grad_norm": 0.4220968186855316, "learning_rate": 2.1393e-05, "loss": 0.0148, "step": 7134 }, { "epoch": 13.164358264081256, "grad_norm": 0.5174040794372559, "learning_rate": 2.1396e-05, "loss": 0.0133, "step": 7135 }, { "epoch": 13.166204986149584, "grad_norm": 0.415140837430954, "learning_rate": 2.1399000000000003e-05, "loss": 0.0143, "step": 7136 }, { "epoch": 13.168051708217913, "grad_norm": 0.2746680974960327, "learning_rate": 2.1402000000000003e-05, "loss": 0.0097, "step": 7137 }, { "epoch": 13.169898430286242, "grad_norm": 0.5087571740150452, "learning_rate": 2.1405000000000003e-05, "loss": 0.0202, "step": 7138 }, { "epoch": 13.171745152354571, "grad_norm": 0.3475409746170044, "learning_rate": 2.1408000000000002e-05, "loss": 0.0182, "step": 7139 }, { "epoch": 13.173591874422899, "grad_norm": 0.6303392648696899, "learning_rate": 2.1411000000000002e-05, "loss": 0.0145, "step": 7140 }, { "epoch": 13.175438596491228, "grad_norm": 0.26322299242019653, "learning_rate": 2.1414e-05, "loss": 0.0111, "step": 7141 }, { "epoch": 13.177285318559557, "grad_norm": 0.3473210334777832, "learning_rate": 2.1417e-05, "loss": 0.0099, "step": 7142 }, { "epoch": 13.179132040627886, "grad_norm": 1.0462384223937988, "learning_rate": 2.1419999999999998e-05, "loss": 0.0269, "step": 7143 }, { "epoch": 13.180978762696213, "grad_norm": 0.7094504237174988, "learning_rate": 2.1422999999999998e-05, "loss": 0.0192, "step": 7144 }, { "epoch": 
13.182825484764543, "grad_norm": 0.990999162197113, "learning_rate": 2.1425999999999998e-05, "loss": 0.0203, "step": 7145 }, { "epoch": 13.184672206832872, "grad_norm": 0.9668733477592468, "learning_rate": 2.1429e-05, "loss": 0.0425, "step": 7146 }, { "epoch": 13.1865189289012, "grad_norm": 0.7852587699890137, "learning_rate": 2.1432e-05, "loss": 0.2142, "step": 7147 }, { "epoch": 13.18836565096953, "grad_norm": 0.675623893737793, "learning_rate": 2.1435e-05, "loss": 0.162, "step": 7148 }, { "epoch": 13.190212373037857, "grad_norm": 0.6665027737617493, "learning_rate": 2.1438e-05, "loss": 0.1618, "step": 7149 }, { "epoch": 13.192059095106186, "grad_norm": 0.7854028940200806, "learning_rate": 2.1441e-05, "loss": 0.1313, "step": 7150 }, { "epoch": 13.193905817174516, "grad_norm": 0.775463342666626, "learning_rate": 2.1444e-05, "loss": 0.1387, "step": 7151 }, { "epoch": 13.195752539242845, "grad_norm": 3.532989501953125, "learning_rate": 2.1447e-05, "loss": 0.1338, "step": 7152 }, { "epoch": 13.197599261311172, "grad_norm": 0.5899909734725952, "learning_rate": 2.145e-05, "loss": 0.1463, "step": 7153 }, { "epoch": 13.199445983379501, "grad_norm": 0.4959987699985504, "learning_rate": 2.1453e-05, "loss": 0.0921, "step": 7154 }, { "epoch": 13.20129270544783, "grad_norm": 0.7340102195739746, "learning_rate": 2.1456e-05, "loss": 0.1113, "step": 7155 }, { "epoch": 13.20313942751616, "grad_norm": 0.8091896772384644, "learning_rate": 2.1459000000000002e-05, "loss": 0.0996, "step": 7156 }, { "epoch": 13.204986149584487, "grad_norm": 0.4246945381164551, "learning_rate": 2.1462000000000002e-05, "loss": 0.0573, "step": 7157 }, { "epoch": 13.206832871652816, "grad_norm": 0.4794583320617676, "learning_rate": 2.1465000000000002e-05, "loss": 0.0522, "step": 7158 }, { "epoch": 13.208679593721145, "grad_norm": 0.8014870882034302, "learning_rate": 2.1468000000000002e-05, "loss": 0.11, "step": 7159 }, { "epoch": 13.210526315789474, "grad_norm": 1.3814865350723267, "learning_rate": 
2.1471e-05, "loss": 0.0724, "step": 7160 }, { "epoch": 13.212373037857802, "grad_norm": 1.1732836961746216, "learning_rate": 2.1474e-05, "loss": 0.0517, "step": 7161 }, { "epoch": 13.21421975992613, "grad_norm": 0.3207579553127289, "learning_rate": 2.1477e-05, "loss": 0.0429, "step": 7162 }, { "epoch": 13.21606648199446, "grad_norm": 0.4469239115715027, "learning_rate": 2.148e-05, "loss": 0.023, "step": 7163 }, { "epoch": 13.21791320406279, "grad_norm": 0.45449766516685486, "learning_rate": 2.1483e-05, "loss": 0.028, "step": 7164 }, { "epoch": 13.219759926131117, "grad_norm": 0.23852013051509857, "learning_rate": 2.1486e-05, "loss": 0.0117, "step": 7165 }, { "epoch": 13.221606648199446, "grad_norm": 0.20696599781513214, "learning_rate": 2.1489e-05, "loss": 0.0132, "step": 7166 }, { "epoch": 13.223453370267775, "grad_norm": 0.30197465419769287, "learning_rate": 2.1492e-05, "loss": 0.0111, "step": 7167 }, { "epoch": 13.225300092336104, "grad_norm": 0.2798866927623749, "learning_rate": 2.1495e-05, "loss": 0.0177, "step": 7168 }, { "epoch": 13.227146814404431, "grad_norm": 0.457897812128067, "learning_rate": 2.1498e-05, "loss": 0.0312, "step": 7169 }, { "epoch": 13.22899353647276, "grad_norm": 0.4140821397304535, "learning_rate": 2.1501e-05, "loss": 0.0123, "step": 7170 }, { "epoch": 13.23084025854109, "grad_norm": 0.6828473210334778, "learning_rate": 2.1504e-05, "loss": 0.025, "step": 7171 }, { "epoch": 13.232686980609419, "grad_norm": 0.23633523285388947, "learning_rate": 2.1507e-05, "loss": 0.0126, "step": 7172 }, { "epoch": 13.234533702677748, "grad_norm": 0.2687138319015503, "learning_rate": 2.151e-05, "loss": 0.0107, "step": 7173 }, { "epoch": 13.236380424746075, "grad_norm": 0.36586427688598633, "learning_rate": 2.1513e-05, "loss": 0.0112, "step": 7174 }, { "epoch": 13.238227146814404, "grad_norm": 0.40713468194007874, "learning_rate": 2.1516e-05, "loss": 0.0231, "step": 7175 }, { "epoch": 13.240073868882734, "grad_norm": 0.41447657346725464, "learning_rate": 
2.1519000000000002e-05, "loss": 0.0454, "step": 7176 }, { "epoch": 13.241920590951063, "grad_norm": 0.6310411095619202, "learning_rate": 2.1522e-05, "loss": 0.023, "step": 7177 }, { "epoch": 13.24376731301939, "grad_norm": 0.3591703474521637, "learning_rate": 2.1525e-05, "loss": 0.0129, "step": 7178 }, { "epoch": 13.24561403508772, "grad_norm": 0.44197985529899597, "learning_rate": 2.1528e-05, "loss": 0.017, "step": 7179 }, { "epoch": 13.247460757156048, "grad_norm": 0.30777108669281006, "learning_rate": 2.1531e-05, "loss": 0.0164, "step": 7180 }, { "epoch": 13.249307479224377, "grad_norm": 0.3263159394264221, "learning_rate": 2.1534e-05, "loss": 0.0094, "step": 7181 }, { "epoch": 13.251154201292705, "grad_norm": 0.9163006544113159, "learning_rate": 2.1537e-05, "loss": 0.024, "step": 7182 }, { "epoch": 13.253000923361034, "grad_norm": 0.7877883911132812, "learning_rate": 2.154e-05, "loss": 0.0397, "step": 7183 }, { "epoch": 13.254847645429363, "grad_norm": 0.2887888252735138, "learning_rate": 2.1543e-05, "loss": 0.0111, "step": 7184 }, { "epoch": 13.256694367497692, "grad_norm": 0.4857921600341797, "learning_rate": 2.1546e-05, "loss": 0.0288, "step": 7185 }, { "epoch": 13.25854108956602, "grad_norm": 0.47236984968185425, "learning_rate": 2.1549000000000003e-05, "loss": 0.0161, "step": 7186 }, { "epoch": 13.260387811634349, "grad_norm": 0.30887550115585327, "learning_rate": 2.1552000000000003e-05, "loss": 0.0108, "step": 7187 }, { "epoch": 13.262234533702678, "grad_norm": 0.5033847689628601, "learning_rate": 2.1555000000000003e-05, "loss": 0.0173, "step": 7188 }, { "epoch": 13.264081255771007, "grad_norm": 0.3925565779209137, "learning_rate": 2.1558000000000003e-05, "loss": 0.0102, "step": 7189 }, { "epoch": 13.265927977839334, "grad_norm": 0.46677395701408386, "learning_rate": 2.1561e-05, "loss": 0.0185, "step": 7190 }, { "epoch": 13.267774699907664, "grad_norm": 0.3494510054588318, "learning_rate": 2.1564e-05, "loss": 0.0166, "step": 7191 }, { "epoch": 
13.269621421975993, "grad_norm": 0.397417813539505, "learning_rate": 2.1567e-05, "loss": 0.0196, "step": 7192 }, { "epoch": 13.271468144044322, "grad_norm": 0.44307324290275574, "learning_rate": 2.157e-05, "loss": 0.0196, "step": 7193 }, { "epoch": 13.27331486611265, "grad_norm": 0.5888022184371948, "learning_rate": 2.1572999999999998e-05, "loss": 0.0492, "step": 7194 }, { "epoch": 13.275161588180978, "grad_norm": 0.4758075773715973, "learning_rate": 2.1575999999999998e-05, "loss": 0.0157, "step": 7195 }, { "epoch": 13.277008310249307, "grad_norm": 0.3339407742023468, "learning_rate": 2.1579e-05, "loss": 0.0157, "step": 7196 }, { "epoch": 13.278855032317637, "grad_norm": 1.0655595064163208, "learning_rate": 2.1582e-05, "loss": 0.2268, "step": 7197 }, { "epoch": 13.280701754385966, "grad_norm": 1.2859565019607544, "learning_rate": 2.1585e-05, "loss": 0.1929, "step": 7198 }, { "epoch": 13.282548476454293, "grad_norm": 0.6945502161979675, "learning_rate": 2.1588e-05, "loss": 0.1583, "step": 7199 }, { "epoch": 13.284395198522622, "grad_norm": 0.5635412335395813, "learning_rate": 2.1591e-05, "loss": 0.1227, "step": 7200 }, { "epoch": 13.286241920590951, "grad_norm": 0.710004448890686, "learning_rate": 2.1594e-05, "loss": 0.1405, "step": 7201 }, { "epoch": 13.28808864265928, "grad_norm": 0.8793792724609375, "learning_rate": 2.1597e-05, "loss": 0.1154, "step": 7202 }, { "epoch": 13.289935364727608, "grad_norm": 0.5695134401321411, "learning_rate": 2.16e-05, "loss": 0.0923, "step": 7203 }, { "epoch": 13.291782086795937, "grad_norm": 0.5201019048690796, "learning_rate": 2.1603e-05, "loss": 0.0729, "step": 7204 }, { "epoch": 13.293628808864266, "grad_norm": 0.8120662569999695, "learning_rate": 2.1606e-05, "loss": 0.1176, "step": 7205 }, { "epoch": 13.295475530932595, "grad_norm": 0.5279056429862976, "learning_rate": 2.1609000000000003e-05, "loss": 0.0907, "step": 7206 }, { "epoch": 13.297322253000923, "grad_norm": 0.9664171934127808, "learning_rate": 2.1612000000000002e-05, 
"loss": 0.1301, "step": 7207 }, { "epoch": 13.299168975069252, "grad_norm": 2.4879748821258545, "learning_rate": 2.1615000000000002e-05, "loss": 0.0676, "step": 7208 }, { "epoch": 13.301015697137581, "grad_norm": 0.6646405458450317, "learning_rate": 2.1618000000000002e-05, "loss": 0.1167, "step": 7209 }, { "epoch": 13.30286241920591, "grad_norm": 0.37683454155921936, "learning_rate": 2.1621000000000002e-05, "loss": 0.047, "step": 7210 }, { "epoch": 13.304709141274238, "grad_norm": 0.31387007236480713, "learning_rate": 2.1624e-05, "loss": 0.0184, "step": 7211 }, { "epoch": 13.306555863342567, "grad_norm": 0.2672097384929657, "learning_rate": 2.1627e-05, "loss": 0.0225, "step": 7212 }, { "epoch": 13.308402585410896, "grad_norm": 0.40251773595809937, "learning_rate": 2.163e-05, "loss": 0.0284, "step": 7213 }, { "epoch": 13.310249307479225, "grad_norm": 0.3544987142086029, "learning_rate": 2.1633e-05, "loss": 0.0207, "step": 7214 }, { "epoch": 13.312096029547552, "grad_norm": 0.33679115772247314, "learning_rate": 2.1635999999999997e-05, "loss": 0.0214, "step": 7215 }, { "epoch": 13.313942751615881, "grad_norm": 0.43973714113235474, "learning_rate": 2.1639e-05, "loss": 0.0154, "step": 7216 }, { "epoch": 13.31578947368421, "grad_norm": 0.3998846113681793, "learning_rate": 2.1642e-05, "loss": 0.0215, "step": 7217 }, { "epoch": 13.31763619575254, "grad_norm": 0.6689403057098389, "learning_rate": 2.1645e-05, "loss": 0.0149, "step": 7218 }, { "epoch": 13.319482917820867, "grad_norm": 0.6973395347595215, "learning_rate": 2.1648e-05, "loss": 0.023, "step": 7219 }, { "epoch": 13.321329639889196, "grad_norm": 0.6347224116325378, "learning_rate": 2.1651e-05, "loss": 0.0153, "step": 7220 }, { "epoch": 13.323176361957525, "grad_norm": 0.4243088662624359, "learning_rate": 2.1654e-05, "loss": 0.0161, "step": 7221 }, { "epoch": 13.325023084025855, "grad_norm": 0.4804117679595947, "learning_rate": 2.1657e-05, "loss": 0.0168, "step": 7222 }, { "epoch": 13.326869806094184, "grad_norm": 
0.36143893003463745, "learning_rate": 2.166e-05, "loss": 0.0126, "step": 7223 }, { "epoch": 13.328716528162511, "grad_norm": 0.32996055483818054, "learning_rate": 2.1663e-05, "loss": 0.0167, "step": 7224 }, { "epoch": 13.33056325023084, "grad_norm": 0.2420320361852646, "learning_rate": 2.1666e-05, "loss": 0.011, "step": 7225 }, { "epoch": 13.33240997229917, "grad_norm": 0.42439308762550354, "learning_rate": 2.1669000000000002e-05, "loss": 0.0158, "step": 7226 }, { "epoch": 13.334256694367498, "grad_norm": 0.28058120608329773, "learning_rate": 2.1672000000000002e-05, "loss": 0.0122, "step": 7227 }, { "epoch": 13.336103416435826, "grad_norm": 0.2649085819721222, "learning_rate": 2.1675e-05, "loss": 0.0115, "step": 7228 }, { "epoch": 13.337950138504155, "grad_norm": 0.6683909893035889, "learning_rate": 2.1678e-05, "loss": 0.0249, "step": 7229 }, { "epoch": 13.339796860572484, "grad_norm": 0.40081146359443665, "learning_rate": 2.1681e-05, "loss": 0.0275, "step": 7230 }, { "epoch": 13.341643582640813, "grad_norm": 0.8051175475120544, "learning_rate": 2.1684e-05, "loss": 0.0199, "step": 7231 }, { "epoch": 13.34349030470914, "grad_norm": 0.21623077988624573, "learning_rate": 2.1687e-05, "loss": 0.0095, "step": 7232 }, { "epoch": 13.34533702677747, "grad_norm": 0.4252525269985199, "learning_rate": 2.169e-05, "loss": 0.0209, "step": 7233 }, { "epoch": 13.347183748845799, "grad_norm": 0.5248692035675049, "learning_rate": 2.1693e-05, "loss": 0.0206, "step": 7234 }, { "epoch": 13.349030470914128, "grad_norm": 16.779129028320312, "learning_rate": 2.1696e-05, "loss": 0.0261, "step": 7235 }, { "epoch": 13.350877192982455, "grad_norm": 0.3974554240703583, "learning_rate": 2.1699000000000003e-05, "loss": 0.0227, "step": 7236 }, { "epoch": 13.352723915050785, "grad_norm": 0.27056658267974854, "learning_rate": 2.1702000000000003e-05, "loss": 0.0111, "step": 7237 }, { "epoch": 13.354570637119114, "grad_norm": 0.42890921235084534, "learning_rate": 2.1705000000000003e-05, "loss": 
0.0213, "step": 7238 }, { "epoch": 13.356417359187443, "grad_norm": 1.2918071746826172, "learning_rate": 2.1708e-05, "loss": 0.0128, "step": 7239 }, { "epoch": 13.35826408125577, "grad_norm": 0.5146956443786621, "learning_rate": 2.1711e-05, "loss": 0.0131, "step": 7240 }, { "epoch": 13.3601108033241, "grad_norm": 0.8489638566970825, "learning_rate": 2.1714e-05, "loss": 0.0167, "step": 7241 }, { "epoch": 13.361957525392429, "grad_norm": 1.5672495365142822, "learning_rate": 2.1717e-05, "loss": 0.0208, "step": 7242 }, { "epoch": 13.363804247460758, "grad_norm": 0.9432560801506042, "learning_rate": 2.172e-05, "loss": 0.0226, "step": 7243 }, { "epoch": 13.365650969529085, "grad_norm": 0.821049153804779, "learning_rate": 2.1723e-05, "loss": 0.0224, "step": 7244 }, { "epoch": 13.367497691597414, "grad_norm": 0.4392554759979248, "learning_rate": 2.1726e-05, "loss": 0.0179, "step": 7245 }, { "epoch": 13.369344413665743, "grad_norm": 0.5630433559417725, "learning_rate": 2.1729e-05, "loss": 0.0096, "step": 7246 }, { "epoch": 13.371191135734072, "grad_norm": 0.8121259212493896, "learning_rate": 2.1732e-05, "loss": 0.1936, "step": 7247 }, { "epoch": 13.373037857802402, "grad_norm": 2.499257802963257, "learning_rate": 2.1735e-05, "loss": 0.3012, "step": 7248 }, { "epoch": 13.374884579870729, "grad_norm": 0.7789328098297119, "learning_rate": 2.1738e-05, "loss": 0.1467, "step": 7249 }, { "epoch": 13.376731301939058, "grad_norm": 0.8124067783355713, "learning_rate": 2.1741e-05, "loss": 0.1901, "step": 7250 }, { "epoch": 13.378578024007387, "grad_norm": 0.9140483140945435, "learning_rate": 2.1744e-05, "loss": 0.1848, "step": 7251 }, { "epoch": 13.380424746075716, "grad_norm": 1.3085297346115112, "learning_rate": 2.1747e-05, "loss": 0.1444, "step": 7252 }, { "epoch": 13.382271468144044, "grad_norm": 0.7353108525276184, "learning_rate": 2.175e-05, "loss": 0.119, "step": 7253 }, { "epoch": 13.384118190212373, "grad_norm": 0.7093847990036011, "learning_rate": 2.1753e-05, "loss": 0.1582, 
"step": 7254 }, { "epoch": 13.385964912280702, "grad_norm": 0.660713791847229, "learning_rate": 2.1756e-05, "loss": 0.1042, "step": 7255 }, { "epoch": 13.387811634349031, "grad_norm": 0.53863126039505, "learning_rate": 2.1759e-05, "loss": 0.0789, "step": 7256 }, { "epoch": 13.389658356417359, "grad_norm": 0.437294602394104, "learning_rate": 2.1762000000000003e-05, "loss": 0.0701, "step": 7257 }, { "epoch": 13.391505078485688, "grad_norm": 0.5495612621307373, "learning_rate": 2.1765000000000003e-05, "loss": 0.0459, "step": 7258 }, { "epoch": 13.393351800554017, "grad_norm": 0.43721771240234375, "learning_rate": 2.1768000000000002e-05, "loss": 0.0419, "step": 7259 }, { "epoch": 13.395198522622346, "grad_norm": 0.65282142162323, "learning_rate": 2.1771000000000002e-05, "loss": 0.0607, "step": 7260 }, { "epoch": 13.397045244690673, "grad_norm": 0.4140559732913971, "learning_rate": 2.1774000000000002e-05, "loss": 0.0373, "step": 7261 }, { "epoch": 13.398891966759003, "grad_norm": 0.6380917429924011, "learning_rate": 2.1777000000000002e-05, "loss": 0.0588, "step": 7262 }, { "epoch": 13.400738688827332, "grad_norm": 0.4607631266117096, "learning_rate": 2.178e-05, "loss": 0.0628, "step": 7263 }, { "epoch": 13.40258541089566, "grad_norm": 0.7542992234230042, "learning_rate": 2.1782999999999998e-05, "loss": 0.0248, "step": 7264 }, { "epoch": 13.404432132963988, "grad_norm": 0.5851745009422302, "learning_rate": 2.1785999999999998e-05, "loss": 0.046, "step": 7265 }, { "epoch": 13.406278855032317, "grad_norm": 0.7884859442710876, "learning_rate": 2.1788999999999998e-05, "loss": 0.0225, "step": 7266 }, { "epoch": 13.408125577100646, "grad_norm": 0.38143467903137207, "learning_rate": 2.1792e-05, "loss": 0.0255, "step": 7267 }, { "epoch": 13.409972299168976, "grad_norm": 0.5632845759391785, "learning_rate": 2.1795e-05, "loss": 0.0206, "step": 7268 }, { "epoch": 13.411819021237303, "grad_norm": 0.4236559271812439, "learning_rate": 2.1798e-05, "loss": 0.038, "step": 7269 }, { 
"epoch": 13.413665743305632, "grad_norm": 0.43563246726989746, "learning_rate": 2.1801e-05, "loss": 0.0261, "step": 7270 }, { "epoch": 13.415512465373961, "grad_norm": 0.2808643579483032, "learning_rate": 2.1804e-05, "loss": 0.0161, "step": 7271 }, { "epoch": 13.41735918744229, "grad_norm": 0.45650675892829895, "learning_rate": 2.1807e-05, "loss": 0.0196, "step": 7272 }, { "epoch": 13.41920590951062, "grad_norm": 0.24618905782699585, "learning_rate": 2.181e-05, "loss": 0.0124, "step": 7273 }, { "epoch": 13.421052631578947, "grad_norm": 0.2942012846469879, "learning_rate": 2.1813e-05, "loss": 0.0202, "step": 7274 }, { "epoch": 13.422899353647276, "grad_norm": 0.26131290197372437, "learning_rate": 2.1816e-05, "loss": 0.0192, "step": 7275 }, { "epoch": 13.424746075715605, "grad_norm": 0.3145621418952942, "learning_rate": 2.1819e-05, "loss": 0.013, "step": 7276 }, { "epoch": 13.426592797783934, "grad_norm": 0.48726406693458557, "learning_rate": 2.1822000000000002e-05, "loss": 0.0152, "step": 7277 }, { "epoch": 13.428439519852262, "grad_norm": 0.4565317928791046, "learning_rate": 2.1825000000000002e-05, "loss": 0.0123, "step": 7278 }, { "epoch": 13.43028624192059, "grad_norm": 0.48445793986320496, "learning_rate": 2.1828000000000002e-05, "loss": 0.0154, "step": 7279 }, { "epoch": 13.43213296398892, "grad_norm": 0.3091747760772705, "learning_rate": 2.1831e-05, "loss": 0.0192, "step": 7280 }, { "epoch": 13.43397968605725, "grad_norm": 1.0693906545639038, "learning_rate": 2.1834e-05, "loss": 0.0188, "step": 7281 }, { "epoch": 13.435826408125576, "grad_norm": 0.3439318537712097, "learning_rate": 2.1837e-05, "loss": 0.0174, "step": 7282 }, { "epoch": 13.437673130193906, "grad_norm": 0.8398624658584595, "learning_rate": 2.184e-05, "loss": 0.0232, "step": 7283 }, { "epoch": 13.439519852262235, "grad_norm": 0.5656768679618835, "learning_rate": 2.1843e-05, "loss": 0.0239, "step": 7284 }, { "epoch": 13.441366574330564, "grad_norm": 0.8627192974090576, "learning_rate": 2.1846e-05, 
"loss": 0.0174, "step": 7285 }, { "epoch": 13.443213296398891, "grad_norm": 0.40234649181365967, "learning_rate": 2.1849e-05, "loss": 0.0186, "step": 7286 }, { "epoch": 13.44506001846722, "grad_norm": 0.26516568660736084, "learning_rate": 2.1852000000000004e-05, "loss": 0.0147, "step": 7287 }, { "epoch": 13.44690674053555, "grad_norm": 0.7071975469589233, "learning_rate": 2.1855e-05, "loss": 0.0153, "step": 7288 }, { "epoch": 13.448753462603879, "grad_norm": 1.1829360723495483, "learning_rate": 2.1858e-05, "loss": 0.025, "step": 7289 }, { "epoch": 13.450600184672206, "grad_norm": 0.37632063031196594, "learning_rate": 2.1861e-05, "loss": 0.023, "step": 7290 }, { "epoch": 13.452446906740535, "grad_norm": 0.29822513461112976, "learning_rate": 2.1864e-05, "loss": 0.013, "step": 7291 }, { "epoch": 13.454293628808864, "grad_norm": 0.3867357671260834, "learning_rate": 2.1867e-05, "loss": 0.0174, "step": 7292 }, { "epoch": 13.456140350877194, "grad_norm": 0.380867600440979, "learning_rate": 2.187e-05, "loss": 0.0137, "step": 7293 }, { "epoch": 13.45798707294552, "grad_norm": 0.4950351417064667, "learning_rate": 2.1873e-05, "loss": 0.0166, "step": 7294 }, { "epoch": 13.45983379501385, "grad_norm": 1.1106423139572144, "learning_rate": 2.1876e-05, "loss": 0.0274, "step": 7295 }, { "epoch": 13.46168051708218, "grad_norm": 0.5773835778236389, "learning_rate": 2.1879e-05, "loss": 0.0375, "step": 7296 }, { "epoch": 13.463527239150508, "grad_norm": 1.07211434841156, "learning_rate": 2.1882e-05, "loss": 0.232, "step": 7297 }, { "epoch": 13.465373961218837, "grad_norm": 0.7582486867904663, "learning_rate": 2.1885e-05, "loss": 0.1851, "step": 7298 }, { "epoch": 13.467220683287165, "grad_norm": 0.8717134594917297, "learning_rate": 2.1888e-05, "loss": 0.1718, "step": 7299 }, { "epoch": 13.469067405355494, "grad_norm": 0.5966388583183289, "learning_rate": 2.1891e-05, "loss": 0.1342, "step": 7300 }, { "epoch": 13.470914127423823, "grad_norm": 0.7327893972396851, "learning_rate": 
2.1894e-05, "loss": 0.1264, "step": 7301 }, { "epoch": 13.472760849492152, "grad_norm": 0.8322132229804993, "learning_rate": 2.1897e-05, "loss": 0.1263, "step": 7302 }, { "epoch": 13.47460757156048, "grad_norm": 0.6498303413391113, "learning_rate": 2.19e-05, "loss": 0.0919, "step": 7303 }, { "epoch": 13.476454293628809, "grad_norm": 0.9275102019309998, "learning_rate": 2.1903e-05, "loss": 0.1055, "step": 7304 }, { "epoch": 13.478301015697138, "grad_norm": 0.5299592614173889, "learning_rate": 2.1906e-05, "loss": 0.084, "step": 7305 }, { "epoch": 13.480147737765467, "grad_norm": 0.4677112400531769, "learning_rate": 2.1909e-05, "loss": 0.0772, "step": 7306 }, { "epoch": 13.481994459833794, "grad_norm": 1.0250064134597778, "learning_rate": 2.1912000000000003e-05, "loss": 0.0993, "step": 7307 }, { "epoch": 13.483841181902124, "grad_norm": 0.40030497312545776, "learning_rate": 2.1915000000000003e-05, "loss": 0.0496, "step": 7308 }, { "epoch": 13.485687903970453, "grad_norm": 0.639595091342926, "learning_rate": 2.1918000000000003e-05, "loss": 0.1111, "step": 7309 }, { "epoch": 13.487534626038782, "grad_norm": 0.5788593292236328, "learning_rate": 2.1921000000000002e-05, "loss": 0.0682, "step": 7310 }, { "epoch": 13.48938134810711, "grad_norm": 1.2469546794891357, "learning_rate": 2.1924000000000002e-05, "loss": 0.0278, "step": 7311 }, { "epoch": 13.491228070175438, "grad_norm": 0.4102165699005127, "learning_rate": 2.1927000000000002e-05, "loss": 0.025, "step": 7312 }, { "epoch": 13.493074792243767, "grad_norm": 0.489040344953537, "learning_rate": 2.193e-05, "loss": 0.0358, "step": 7313 }, { "epoch": 13.494921514312097, "grad_norm": 0.269023597240448, "learning_rate": 2.1932999999999998e-05, "loss": 0.0198, "step": 7314 }, { "epoch": 13.496768236380424, "grad_norm": 0.7333555221557617, "learning_rate": 2.1935999999999998e-05, "loss": 0.0973, "step": 7315 }, { "epoch": 13.498614958448753, "grad_norm": 0.6423579454421997, "learning_rate": 2.1938999999999998e-05, "loss": 
0.0403, "step": 7316 }, { "epoch": 13.500461680517082, "grad_norm": 0.356412410736084, "learning_rate": 2.1942e-05, "loss": 0.0143, "step": 7317 }, { "epoch": 13.502308402585411, "grad_norm": 0.7933915853500366, "learning_rate": 2.1945e-05, "loss": 0.0432, "step": 7318 }, { "epoch": 13.504155124653739, "grad_norm": 0.3011784553527832, "learning_rate": 2.1948e-05, "loss": 0.0215, "step": 7319 }, { "epoch": 13.506001846722068, "grad_norm": 0.2465125173330307, "learning_rate": 2.1951e-05, "loss": 0.0161, "step": 7320 }, { "epoch": 13.507848568790397, "grad_norm": 0.5328660607337952, "learning_rate": 2.1954e-05, "loss": 0.0158, "step": 7321 }, { "epoch": 13.509695290858726, "grad_norm": 0.33585166931152344, "learning_rate": 2.1957e-05, "loss": 0.0195, "step": 7322 }, { "epoch": 13.511542012927055, "grad_norm": 0.4674575924873352, "learning_rate": 2.196e-05, "loss": 0.016, "step": 7323 }, { "epoch": 13.513388734995383, "grad_norm": 0.629899263381958, "learning_rate": 2.1963e-05, "loss": 0.017, "step": 7324 }, { "epoch": 13.515235457063712, "grad_norm": 0.4062348008155823, "learning_rate": 2.1966e-05, "loss": 0.0181, "step": 7325 }, { "epoch": 13.517082179132041, "grad_norm": 0.2978869676589966, "learning_rate": 2.1969e-05, "loss": 0.0141, "step": 7326 }, { "epoch": 13.51892890120037, "grad_norm": 0.3354339003562927, "learning_rate": 2.1972000000000002e-05, "loss": 0.0153, "step": 7327 }, { "epoch": 13.520775623268698, "grad_norm": 1.723165512084961, "learning_rate": 2.1975000000000002e-05, "loss": 0.0139, "step": 7328 }, { "epoch": 13.522622345337027, "grad_norm": 0.6914976239204407, "learning_rate": 2.1978000000000002e-05, "loss": 0.0161, "step": 7329 }, { "epoch": 13.524469067405356, "grad_norm": 0.48744842410087585, "learning_rate": 2.1981000000000002e-05, "loss": 0.0153, "step": 7330 }, { "epoch": 13.526315789473685, "grad_norm": 0.22318950295448303, "learning_rate": 2.1984e-05, "loss": 0.0094, "step": 7331 }, { "epoch": 13.528162511542012, "grad_norm": 
1.1957658529281616, "learning_rate": 2.1987e-05, "loss": 0.0422, "step": 7332 }, { "epoch": 13.530009233610341, "grad_norm": 0.39699581265449524, "learning_rate": 2.199e-05, "loss": 0.0111, "step": 7333 }, { "epoch": 13.53185595567867, "grad_norm": 0.30261993408203125, "learning_rate": 2.1993e-05, "loss": 0.0102, "step": 7334 }, { "epoch": 13.533702677747, "grad_norm": 0.46019795536994934, "learning_rate": 2.1996e-05, "loss": 0.0151, "step": 7335 }, { "epoch": 13.535549399815327, "grad_norm": 0.6777722835540771, "learning_rate": 2.1999e-05, "loss": 0.0186, "step": 7336 }, { "epoch": 13.537396121883656, "grad_norm": 0.44539082050323486, "learning_rate": 2.2002e-05, "loss": 0.0189, "step": 7337 }, { "epoch": 13.539242843951985, "grad_norm": 1.1245629787445068, "learning_rate": 2.2005e-05, "loss": 0.0343, "step": 7338 }, { "epoch": 13.541089566020315, "grad_norm": 0.4725266695022583, "learning_rate": 2.2008e-05, "loss": 0.0116, "step": 7339 }, { "epoch": 13.542936288088642, "grad_norm": 0.5914231538772583, "learning_rate": 2.2011e-05, "loss": 0.0257, "step": 7340 }, { "epoch": 13.544783010156971, "grad_norm": 0.8138396739959717, "learning_rate": 2.2014e-05, "loss": 0.0249, "step": 7341 }, { "epoch": 13.5466297322253, "grad_norm": 0.7865918874740601, "learning_rate": 2.2017e-05, "loss": 0.0177, "step": 7342 }, { "epoch": 13.54847645429363, "grad_norm": 0.5864379405975342, "learning_rate": 2.202e-05, "loss": 0.0211, "step": 7343 }, { "epoch": 13.550323176361957, "grad_norm": 0.5314502716064453, "learning_rate": 2.2023e-05, "loss": 0.0153, "step": 7344 }, { "epoch": 13.552169898430286, "grad_norm": 1.0157397985458374, "learning_rate": 2.2026e-05, "loss": 0.0318, "step": 7345 }, { "epoch": 13.554016620498615, "grad_norm": 0.9477611184120178, "learning_rate": 2.2029e-05, "loss": 0.0208, "step": 7346 }, { "epoch": 13.555863342566944, "grad_norm": 0.9941097497940063, "learning_rate": 2.2032000000000002e-05, "loss": 0.2425, "step": 7347 }, { "epoch": 13.557710064635273, 
"grad_norm": 0.6913464665412903, "learning_rate": 2.2035e-05, "loss": 0.2258, "step": 7348 }, { "epoch": 13.5595567867036, "grad_norm": 1.1100223064422607, "learning_rate": 2.2038e-05, "loss": 0.211, "step": 7349 }, { "epoch": 13.56140350877193, "grad_norm": 0.7175942063331604, "learning_rate": 2.2041e-05, "loss": 0.1512, "step": 7350 }, { "epoch": 13.563250230840259, "grad_norm": 0.6584893465042114, "learning_rate": 2.2044e-05, "loss": 0.1469, "step": 7351 }, { "epoch": 13.565096952908588, "grad_norm": 1.1889525651931763, "learning_rate": 2.2047e-05, "loss": 0.2033, "step": 7352 }, { "epoch": 13.566943674976915, "grad_norm": 0.49368175864219666, "learning_rate": 2.205e-05, "loss": 0.0866, "step": 7353 }, { "epoch": 13.568790397045245, "grad_norm": 0.514544665813446, "learning_rate": 2.2053e-05, "loss": 0.0779, "step": 7354 }, { "epoch": 13.570637119113574, "grad_norm": 1.3042136430740356, "learning_rate": 2.2056e-05, "loss": 0.1321, "step": 7355 }, { "epoch": 13.572483841181903, "grad_norm": 0.49586865305900574, "learning_rate": 2.2059e-05, "loss": 0.0815, "step": 7356 }, { "epoch": 13.57433056325023, "grad_norm": 0.5277937650680542, "learning_rate": 2.2062000000000003e-05, "loss": 0.0517, "step": 7357 }, { "epoch": 13.57617728531856, "grad_norm": 0.40392178297042847, "learning_rate": 2.2065000000000003e-05, "loss": 0.0841, "step": 7358 }, { "epoch": 13.578024007386889, "grad_norm": 0.5108973979949951, "learning_rate": 2.2068000000000003e-05, "loss": 0.0392, "step": 7359 }, { "epoch": 13.579870729455218, "grad_norm": 0.4702042043209076, "learning_rate": 2.2071000000000003e-05, "loss": 0.0296, "step": 7360 }, { "epoch": 13.581717451523545, "grad_norm": 0.4801340103149414, "learning_rate": 2.2074000000000002e-05, "loss": 0.0325, "step": 7361 }, { "epoch": 13.583564173591874, "grad_norm": 0.4173518419265747, "learning_rate": 2.2077e-05, "loss": 0.0225, "step": 7362 }, { "epoch": 13.585410895660203, "grad_norm": 0.4327594041824341, "learning_rate": 2.208e-05, "loss": 
0.0191, "step": 7363 }, { "epoch": 13.587257617728532, "grad_norm": 0.5534989237785339, "learning_rate": 2.2083e-05, "loss": 0.038, "step": 7364 }, { "epoch": 13.58910433979686, "grad_norm": 0.19051194190979004, "learning_rate": 2.2085999999999998e-05, "loss": 0.0087, "step": 7365 }, { "epoch": 13.590951061865189, "grad_norm": 0.4202093482017517, "learning_rate": 2.2088999999999998e-05, "loss": 0.0308, "step": 7366 }, { "epoch": 13.592797783933518, "grad_norm": 0.7560642957687378, "learning_rate": 2.2092e-05, "loss": 0.02, "step": 7367 }, { "epoch": 13.594644506001847, "grad_norm": 0.2742268145084381, "learning_rate": 2.2095e-05, "loss": 0.0078, "step": 7368 }, { "epoch": 13.596491228070175, "grad_norm": 0.5704046487808228, "learning_rate": 2.2098e-05, "loss": 0.0199, "step": 7369 }, { "epoch": 13.598337950138504, "grad_norm": 0.6069647073745728, "learning_rate": 2.2101e-05, "loss": 0.0219, "step": 7370 }, { "epoch": 13.600184672206833, "grad_norm": 0.44164136052131653, "learning_rate": 2.2104e-05, "loss": 0.0419, "step": 7371 }, { "epoch": 13.602031394275162, "grad_norm": 0.48983797430992126, "learning_rate": 2.2107e-05, "loss": 0.026, "step": 7372 }, { "epoch": 13.603878116343491, "grad_norm": 0.32292410731315613, "learning_rate": 2.211e-05, "loss": 0.0121, "step": 7373 }, { "epoch": 13.605724838411819, "grad_norm": 1.0277533531188965, "learning_rate": 2.2113e-05, "loss": 0.0186, "step": 7374 }, { "epoch": 13.607571560480148, "grad_norm": 0.6106177568435669, "learning_rate": 2.2116e-05, "loss": 0.0234, "step": 7375 }, { "epoch": 13.609418282548477, "grad_norm": 0.7649809122085571, "learning_rate": 2.2119e-05, "loss": 0.0782, "step": 7376 }, { "epoch": 13.611265004616806, "grad_norm": 0.5042434930801392, "learning_rate": 2.2122000000000003e-05, "loss": 0.0448, "step": 7377 }, { "epoch": 13.613111726685133, "grad_norm": 0.4876888394355774, "learning_rate": 2.2125000000000002e-05, "loss": 0.0156, "step": 7378 }, { "epoch": 13.614958448753463, "grad_norm": 
0.40460675954818726, "learning_rate": 2.2128000000000002e-05, "loss": 0.0153, "step": 7379 }, { "epoch": 13.616805170821792, "grad_norm": 0.21005268394947052, "learning_rate": 2.2131000000000002e-05, "loss": 0.0066, "step": 7380 }, { "epoch": 13.61865189289012, "grad_norm": 0.43871554732322693, "learning_rate": 2.2134000000000002e-05, "loss": 0.0267, "step": 7381 }, { "epoch": 13.620498614958448, "grad_norm": 0.3028872013092041, "learning_rate": 2.2137e-05, "loss": 0.0171, "step": 7382 }, { "epoch": 13.622345337026777, "grad_norm": 0.775253415107727, "learning_rate": 2.214e-05, "loss": 0.0185, "step": 7383 }, { "epoch": 13.624192059095106, "grad_norm": 1.5879731178283691, "learning_rate": 2.2143e-05, "loss": 0.0149, "step": 7384 }, { "epoch": 13.626038781163436, "grad_norm": 0.7068104147911072, "learning_rate": 2.2146e-05, "loss": 0.0302, "step": 7385 }, { "epoch": 13.627885503231763, "grad_norm": 0.5778458118438721, "learning_rate": 2.2149e-05, "loss": 0.0178, "step": 7386 }, { "epoch": 13.629732225300092, "grad_norm": 0.9886907935142517, "learning_rate": 2.2151999999999997e-05, "loss": 0.0136, "step": 7387 }, { "epoch": 13.631578947368421, "grad_norm": 0.5738271474838257, "learning_rate": 2.2155e-05, "loss": 0.0197, "step": 7388 }, { "epoch": 13.63342566943675, "grad_norm": 0.7942144870758057, "learning_rate": 2.2158e-05, "loss": 0.0268, "step": 7389 }, { "epoch": 13.635272391505078, "grad_norm": 0.7159650325775146, "learning_rate": 2.2161e-05, "loss": 0.0259, "step": 7390 }, { "epoch": 13.637119113573407, "grad_norm": 0.3914990723133087, "learning_rate": 2.2164e-05, "loss": 0.017, "step": 7391 }, { "epoch": 13.638965835641736, "grad_norm": 0.36542847752571106, "learning_rate": 2.2167e-05, "loss": 0.0195, "step": 7392 }, { "epoch": 13.640812557710065, "grad_norm": 0.48772743344306946, "learning_rate": 2.217e-05, "loss": 0.0335, "step": 7393 }, { "epoch": 13.642659279778393, "grad_norm": 0.36953237652778625, "learning_rate": 2.2173e-05, "loss": 0.0122, "step": 
7394 }, { "epoch": 13.644506001846722, "grad_norm": 0.5144199132919312, "learning_rate": 2.2176e-05, "loss": 0.0246, "step": 7395 }, { "epoch": 13.64635272391505, "grad_norm": 0.517334520816803, "learning_rate": 2.2179e-05, "loss": 0.0179, "step": 7396 }, { "epoch": 13.64819944598338, "grad_norm": 1.3453407287597656, "learning_rate": 2.2182e-05, "loss": 0.2604, "step": 7397 }, { "epoch": 13.65004616805171, "grad_norm": 0.7100138068199158, "learning_rate": 2.2185000000000002e-05, "loss": 0.2018, "step": 7398 }, { "epoch": 13.651892890120036, "grad_norm": 0.8211055397987366, "learning_rate": 2.2188e-05, "loss": 0.233, "step": 7399 }, { "epoch": 13.653739612188366, "grad_norm": 0.592019259929657, "learning_rate": 2.2191e-05, "loss": 0.127, "step": 7400 }, { "epoch": 13.655586334256695, "grad_norm": 0.7646599411964417, "learning_rate": 2.2194e-05, "loss": 0.14, "step": 7401 }, { "epoch": 13.657433056325024, "grad_norm": 0.44219911098480225, "learning_rate": 2.2197e-05, "loss": 0.084, "step": 7402 }, { "epoch": 13.659279778393351, "grad_norm": 0.5861028432846069, "learning_rate": 2.22e-05, "loss": 0.0807, "step": 7403 }, { "epoch": 13.66112650046168, "grad_norm": 0.6860338449478149, "learning_rate": 2.2203e-05, "loss": 0.0879, "step": 7404 }, { "epoch": 13.66297322253001, "grad_norm": 0.733318567276001, "learning_rate": 2.2206e-05, "loss": 0.0745, "step": 7405 }, { "epoch": 13.664819944598339, "grad_norm": 0.4197256863117218, "learning_rate": 2.2209e-05, "loss": 0.052, "step": 7406 }, { "epoch": 13.666666666666666, "grad_norm": 0.4183002710342407, "learning_rate": 2.2212e-05, "loss": 0.0516, "step": 7407 }, { "epoch": 13.668513388734995, "grad_norm": 0.5573427677154541, "learning_rate": 2.2215000000000003e-05, "loss": 0.0903, "step": 7408 }, { "epoch": 13.670360110803324, "grad_norm": 0.6062901616096497, "learning_rate": 2.2218000000000003e-05, "loss": 0.0867, "step": 7409 }, { "epoch": 13.672206832871654, "grad_norm": 0.3610090911388397, "learning_rate": 
2.2221000000000003e-05, "loss": 0.0313, "step": 7410 }, { "epoch": 13.67405355493998, "grad_norm": 0.45338574051856995, "learning_rate": 2.2224e-05, "loss": 0.0284, "step": 7411 }, { "epoch": 13.67590027700831, "grad_norm": 0.35504356026649475, "learning_rate": 2.2227e-05, "loss": 0.0506, "step": 7412 }, { "epoch": 13.67774699907664, "grad_norm": 0.5283374786376953, "learning_rate": 2.223e-05, "loss": 0.0264, "step": 7413 }, { "epoch": 13.679593721144968, "grad_norm": 0.9853783249855042, "learning_rate": 2.2233e-05, "loss": 0.0334, "step": 7414 }, { "epoch": 13.681440443213296, "grad_norm": 0.9584597945213318, "learning_rate": 2.2236e-05, "loss": 0.074, "step": 7415 }, { "epoch": 13.683287165281625, "grad_norm": 0.34112200140953064, "learning_rate": 2.2239e-05, "loss": 0.0306, "step": 7416 }, { "epoch": 13.685133887349954, "grad_norm": 0.7212913036346436, "learning_rate": 2.2241999999999998e-05, "loss": 0.0439, "step": 7417 }, { "epoch": 13.686980609418283, "grad_norm": 0.6430798768997192, "learning_rate": 2.2245e-05, "loss": 0.026, "step": 7418 }, { "epoch": 13.68882733148661, "grad_norm": 0.4628603756427765, "learning_rate": 2.2248e-05, "loss": 0.0142, "step": 7419 }, { "epoch": 13.69067405355494, "grad_norm": 1.0656933784484863, "learning_rate": 2.2251e-05, "loss": 0.0193, "step": 7420 }, { "epoch": 13.692520775623269, "grad_norm": 0.3730289041996002, "learning_rate": 2.2254e-05, "loss": 0.0253, "step": 7421 }, { "epoch": 13.694367497691598, "grad_norm": 0.3692324161529541, "learning_rate": 2.2257e-05, "loss": 0.0121, "step": 7422 }, { "epoch": 13.696214219759927, "grad_norm": 0.2735585570335388, "learning_rate": 2.226e-05, "loss": 0.0127, "step": 7423 }, { "epoch": 13.698060941828254, "grad_norm": 0.1949063241481781, "learning_rate": 2.2263e-05, "loss": 0.0086, "step": 7424 }, { "epoch": 13.699907663896584, "grad_norm": 0.4594740867614746, "learning_rate": 2.2266e-05, "loss": 0.018, "step": 7425 }, { "epoch": 13.701754385964913, "grad_norm": 0.3455602824687958, 
"learning_rate": 2.2269e-05, "loss": 0.0154, "step": 7426 }, { "epoch": 13.703601108033242, "grad_norm": 0.4273536503314972, "learning_rate": 2.2272e-05, "loss": 0.0141, "step": 7427 }, { "epoch": 13.70544783010157, "grad_norm": 0.5404307246208191, "learning_rate": 2.2275000000000003e-05, "loss": 0.0281, "step": 7428 }, { "epoch": 13.707294552169898, "grad_norm": 0.8004701137542725, "learning_rate": 2.2278000000000003e-05, "loss": 0.0256, "step": 7429 }, { "epoch": 13.709141274238227, "grad_norm": 0.8993343114852905, "learning_rate": 2.2281000000000002e-05, "loss": 0.0277, "step": 7430 }, { "epoch": 13.710987996306557, "grad_norm": 0.4021378755569458, "learning_rate": 2.2284000000000002e-05, "loss": 0.0253, "step": 7431 }, { "epoch": 13.712834718374884, "grad_norm": 0.37600457668304443, "learning_rate": 2.2287000000000002e-05, "loss": 0.0132, "step": 7432 }, { "epoch": 13.714681440443213, "grad_norm": 0.4592324495315552, "learning_rate": 2.2290000000000002e-05, "loss": 0.0146, "step": 7433 }, { "epoch": 13.716528162511542, "grad_norm": 0.3562804162502289, "learning_rate": 2.2293e-05, "loss": 0.0447, "step": 7434 }, { "epoch": 13.718374884579871, "grad_norm": 0.5064547657966614, "learning_rate": 2.2296e-05, "loss": 0.0174, "step": 7435 }, { "epoch": 13.720221606648199, "grad_norm": 0.300818532705307, "learning_rate": 2.2298999999999998e-05, "loss": 0.0202, "step": 7436 }, { "epoch": 13.722068328716528, "grad_norm": 0.4283485412597656, "learning_rate": 2.2301999999999998e-05, "loss": 0.0179, "step": 7437 }, { "epoch": 13.723915050784857, "grad_norm": 0.3496239483356476, "learning_rate": 2.2305e-05, "loss": 0.0069, "step": 7438 }, { "epoch": 13.725761772853186, "grad_norm": 0.7668992280960083, "learning_rate": 2.2308e-05, "loss": 0.0335, "step": 7439 }, { "epoch": 13.727608494921514, "grad_norm": 0.43262067437171936, "learning_rate": 2.2311e-05, "loss": 0.026, "step": 7440 }, { "epoch": 13.729455216989843, "grad_norm": 0.33174577355384827, "learning_rate": 2.2314e-05, 
"loss": 0.0163, "step": 7441 }, { "epoch": 13.731301939058172, "grad_norm": 0.4642624258995056, "learning_rate": 2.2317e-05, "loss": 0.0273, "step": 7442 }, { "epoch": 13.733148661126501, "grad_norm": 0.8481758236885071, "learning_rate": 2.232e-05, "loss": 0.0294, "step": 7443 }, { "epoch": 13.734995383194828, "grad_norm": 1.4036601781845093, "learning_rate": 2.2323e-05, "loss": 0.0194, "step": 7444 }, { "epoch": 13.736842105263158, "grad_norm": 0.8418857455253601, "learning_rate": 2.2326e-05, "loss": 0.0256, "step": 7445 }, { "epoch": 13.738688827331487, "grad_norm": 0.9245105981826782, "learning_rate": 2.2329e-05, "loss": 0.0291, "step": 7446 }, { "epoch": 13.740535549399816, "grad_norm": 0.9908247590065002, "learning_rate": 2.2332e-05, "loss": 0.2072, "step": 7447 }, { "epoch": 13.742382271468145, "grad_norm": 0.8401052355766296, "learning_rate": 2.2335000000000002e-05, "loss": 0.1964, "step": 7448 }, { "epoch": 13.744228993536472, "grad_norm": 0.6335350275039673, "learning_rate": 2.2338000000000002e-05, "loss": 0.1581, "step": 7449 }, { "epoch": 13.746075715604801, "grad_norm": 0.9489895701408386, "learning_rate": 2.2341000000000002e-05, "loss": 0.1505, "step": 7450 }, { "epoch": 13.74792243767313, "grad_norm": 0.5921657681465149, "learning_rate": 2.2344e-05, "loss": 0.148, "step": 7451 }, { "epoch": 13.749769159741458, "grad_norm": 0.674635112285614, "learning_rate": 2.2347e-05, "loss": 0.117, "step": 7452 }, { "epoch": 13.751615881809787, "grad_norm": 0.47981318831443787, "learning_rate": 2.235e-05, "loss": 0.0772, "step": 7453 }, { "epoch": 13.753462603878116, "grad_norm": 0.6606249809265137, "learning_rate": 2.2353e-05, "loss": 0.0903, "step": 7454 }, { "epoch": 13.755309325946445, "grad_norm": 0.8978612422943115, "learning_rate": 2.2356e-05, "loss": 0.1229, "step": 7455 }, { "epoch": 13.757156048014775, "grad_norm": 0.4498438239097595, "learning_rate": 2.2359e-05, "loss": 0.0753, "step": 7456 }, { "epoch": 13.759002770083102, "grad_norm": 
1.2396608591079712, "learning_rate": 2.2362e-05, "loss": 0.1475, "step": 7457 }, { "epoch": 13.760849492151431, "grad_norm": 0.9009991884231567, "learning_rate": 2.2365000000000004e-05, "loss": 0.1124, "step": 7458 }, { "epoch": 13.76269621421976, "grad_norm": 0.5656141638755798, "learning_rate": 2.2368000000000003e-05, "loss": 0.0722, "step": 7459 }, { "epoch": 13.76454293628809, "grad_norm": 0.42123159766197205, "learning_rate": 2.2371e-05, "loss": 0.0566, "step": 7460 }, { "epoch": 13.766389658356417, "grad_norm": 0.2959066927433014, "learning_rate": 2.2374e-05, "loss": 0.0391, "step": 7461 }, { "epoch": 13.768236380424746, "grad_norm": 0.43474075198173523, "learning_rate": 2.2377e-05, "loss": 0.0366, "step": 7462 }, { "epoch": 13.770083102493075, "grad_norm": 0.5660763382911682, "learning_rate": 2.238e-05, "loss": 0.018, "step": 7463 }, { "epoch": 13.771929824561404, "grad_norm": 0.48695066571235657, "learning_rate": 2.2383e-05, "loss": 0.0308, "step": 7464 }, { "epoch": 13.773776546629731, "grad_norm": 0.21219317615032196, "learning_rate": 2.2386e-05, "loss": 0.0117, "step": 7465 }, { "epoch": 13.77562326869806, "grad_norm": 1.1667358875274658, "learning_rate": 2.2389e-05, "loss": 0.0448, "step": 7466 }, { "epoch": 13.77746999076639, "grad_norm": 0.47652381658554077, "learning_rate": 2.2392e-05, "loss": 0.0472, "step": 7467 }, { "epoch": 13.779316712834719, "grad_norm": 0.35283544659614563, "learning_rate": 2.2395e-05, "loss": 0.0243, "step": 7468 }, { "epoch": 13.781163434903046, "grad_norm": 0.2358464151620865, "learning_rate": 2.2398e-05, "loss": 0.0148, "step": 7469 }, { "epoch": 13.783010156971375, "grad_norm": 0.982874870300293, "learning_rate": 2.2401e-05, "loss": 0.0433, "step": 7470 }, { "epoch": 13.784856879039705, "grad_norm": 0.3252042829990387, "learning_rate": 2.2404e-05, "loss": 0.0167, "step": 7471 }, { "epoch": 13.786703601108034, "grad_norm": 0.3061884939670563, "learning_rate": 2.2407e-05, "loss": 0.0227, "step": 7472 }, { "epoch": 
13.788550323176363, "grad_norm": 0.32022055983543396, "learning_rate": 2.241e-05, "loss": 0.047, "step": 7473 }, { "epoch": 13.79039704524469, "grad_norm": 0.31437739729881287, "learning_rate": 2.2413e-05, "loss": 0.017, "step": 7474 }, { "epoch": 13.79224376731302, "grad_norm": 0.17899277806282043, "learning_rate": 2.2416e-05, "loss": 0.0158, "step": 7475 }, { "epoch": 13.794090489381349, "grad_norm": 0.4037555158138275, "learning_rate": 2.2419e-05, "loss": 0.0195, "step": 7476 }, { "epoch": 13.795937211449676, "grad_norm": 3.9704298973083496, "learning_rate": 2.2422e-05, "loss": 0.0213, "step": 7477 }, { "epoch": 13.797783933518005, "grad_norm": 0.41569051146507263, "learning_rate": 2.2425000000000003e-05, "loss": 0.0202, "step": 7478 }, { "epoch": 13.799630655586334, "grad_norm": 0.3402164876461029, "learning_rate": 2.2428000000000003e-05, "loss": 0.0166, "step": 7479 }, { "epoch": 13.801477377654663, "grad_norm": 0.5518233180046082, "learning_rate": 2.2431000000000003e-05, "loss": 0.0183, "step": 7480 }, { "epoch": 13.803324099722992, "grad_norm": 0.28906118869781494, "learning_rate": 2.2434000000000002e-05, "loss": 0.016, "step": 7481 }, { "epoch": 13.80517082179132, "grad_norm": 0.4287084937095642, "learning_rate": 2.2437000000000002e-05, "loss": 0.016, "step": 7482 }, { "epoch": 13.807017543859649, "grad_norm": 0.455166757106781, "learning_rate": 2.2440000000000002e-05, "loss": 0.0202, "step": 7483 }, { "epoch": 13.808864265927978, "grad_norm": 0.2282412201166153, "learning_rate": 2.2443000000000002e-05, "loss": 0.0142, "step": 7484 }, { "epoch": 13.810710987996307, "grad_norm": 0.37432539463043213, "learning_rate": 2.2445999999999998e-05, "loss": 0.0164, "step": 7485 }, { "epoch": 13.812557710064635, "grad_norm": 0.870344877243042, "learning_rate": 2.2448999999999998e-05, "loss": 0.0241, "step": 7486 }, { "epoch": 13.814404432132964, "grad_norm": 0.2512337267398834, "learning_rate": 2.2451999999999998e-05, "loss": 0.012, "step": 7487 }, { "epoch": 
13.816251154201293, "grad_norm": 0.25623369216918945, "learning_rate": 2.2455e-05, "loss": 0.0115, "step": 7488 }, { "epoch": 13.818097876269622, "grad_norm": 0.5129430890083313, "learning_rate": 2.2458e-05, "loss": 0.0146, "step": 7489 }, { "epoch": 13.81994459833795, "grad_norm": 0.5183201432228088, "learning_rate": 2.2461e-05, "loss": 0.0184, "step": 7490 }, { "epoch": 13.821791320406279, "grad_norm": 0.6669928431510925, "learning_rate": 2.2464e-05, "loss": 0.0239, "step": 7491 }, { "epoch": 13.823638042474608, "grad_norm": 0.3957689106464386, "learning_rate": 2.2467e-05, "loss": 0.011, "step": 7492 }, { "epoch": 13.825484764542937, "grad_norm": 0.4440816044807434, "learning_rate": 2.247e-05, "loss": 0.0125, "step": 7493 }, { "epoch": 13.827331486611264, "grad_norm": 0.4568639099597931, "learning_rate": 2.2473e-05, "loss": 0.0172, "step": 7494 }, { "epoch": 13.829178208679593, "grad_norm": 0.7566015720367432, "learning_rate": 2.2476e-05, "loss": 0.0202, "step": 7495 }, { "epoch": 13.831024930747922, "grad_norm": 0.6292212605476379, "learning_rate": 2.2479e-05, "loss": 0.0255, "step": 7496 }, { "epoch": 13.832871652816252, "grad_norm": 0.9029131531715393, "learning_rate": 2.2482e-05, "loss": 0.2179, "step": 7497 }, { "epoch": 13.83471837488458, "grad_norm": 0.6349799633026123, "learning_rate": 2.2485000000000002e-05, "loss": 0.1667, "step": 7498 }, { "epoch": 13.836565096952908, "grad_norm": 0.5616112351417542, "learning_rate": 2.2488000000000002e-05, "loss": 0.1714, "step": 7499 }, { "epoch": 13.838411819021237, "grad_norm": 0.6807947158813477, "learning_rate": 2.2491000000000002e-05, "loss": 0.1424, "step": 7500 }, { "epoch": 13.840258541089566, "grad_norm": 0.5123181343078613, "learning_rate": 2.2494000000000002e-05, "loss": 0.0932, "step": 7501 }, { "epoch": 13.842105263157894, "grad_norm": 0.6139744520187378, "learning_rate": 2.2497e-05, "loss": 0.096, "step": 7502 }, { "epoch": 13.843951985226223, "grad_norm": 0.7582729458808899, "learning_rate": 2.25e-05, 
"loss": 0.1449, "step": 7503 }, { "epoch": 13.845798707294552, "grad_norm": 0.5244767069816589, "learning_rate": 2.2503e-05, "loss": 0.0888, "step": 7504 }, { "epoch": 13.847645429362881, "grad_norm": 0.685250997543335, "learning_rate": 2.2506e-05, "loss": 0.0891, "step": 7505 }, { "epoch": 13.84949215143121, "grad_norm": 0.5729045271873474, "learning_rate": 2.2509e-05, "loss": 0.0945, "step": 7506 }, { "epoch": 13.851338873499538, "grad_norm": 0.76090407371521, "learning_rate": 2.2512e-05, "loss": 0.0704, "step": 7507 }, { "epoch": 13.853185595567867, "grad_norm": 0.9119696617126465, "learning_rate": 2.2515e-05, "loss": 0.0903, "step": 7508 }, { "epoch": 13.855032317636196, "grad_norm": 0.2945483922958374, "learning_rate": 2.2518e-05, "loss": 0.0369, "step": 7509 }, { "epoch": 13.856879039704525, "grad_norm": 0.7408194541931152, "learning_rate": 2.2521e-05, "loss": 0.133, "step": 7510 }, { "epoch": 13.858725761772853, "grad_norm": 0.37890946865081787, "learning_rate": 2.2524e-05, "loss": 0.0345, "step": 7511 }, { "epoch": 13.860572483841182, "grad_norm": 0.49662187695503235, "learning_rate": 2.2527e-05, "loss": 0.0414, "step": 7512 }, { "epoch": 13.86241920590951, "grad_norm": 0.4859124422073364, "learning_rate": 2.253e-05, "loss": 0.0302, "step": 7513 }, { "epoch": 13.86426592797784, "grad_norm": 0.34445253014564514, "learning_rate": 2.2533e-05, "loss": 0.0171, "step": 7514 }, { "epoch": 13.866112650046167, "grad_norm": 0.549643337726593, "learning_rate": 2.2536e-05, "loss": 0.0323, "step": 7515 }, { "epoch": 13.867959372114496, "grad_norm": 0.4903494417667389, "learning_rate": 2.2539e-05, "loss": 0.0471, "step": 7516 }, { "epoch": 13.869806094182826, "grad_norm": 0.3244134187698364, "learning_rate": 2.2542e-05, "loss": 0.0183, "step": 7517 }, { "epoch": 13.871652816251155, "grad_norm": 0.3773230016231537, "learning_rate": 2.2545e-05, "loss": 0.0282, "step": 7518 }, { "epoch": 13.873499538319482, "grad_norm": 0.5452808737754822, "learning_rate": 2.2548e-05, 
"loss": 0.0256, "step": 7519 }, { "epoch": 13.875346260387811, "grad_norm": 0.48886626958847046, "learning_rate": 2.2551e-05, "loss": 0.0276, "step": 7520 }, { "epoch": 13.87719298245614, "grad_norm": 0.47622591257095337, "learning_rate": 2.2554e-05, "loss": 0.0181, "step": 7521 }, { "epoch": 13.87903970452447, "grad_norm": 0.4958360195159912, "learning_rate": 2.2557e-05, "loss": 0.0228, "step": 7522 }, { "epoch": 13.880886426592799, "grad_norm": 0.3116703927516937, "learning_rate": 2.256e-05, "loss": 0.0171, "step": 7523 }, { "epoch": 13.882733148661126, "grad_norm": 0.7549593448638916, "learning_rate": 2.2563e-05, "loss": 0.0353, "step": 7524 }, { "epoch": 13.884579870729455, "grad_norm": 0.2600730359554291, "learning_rate": 2.2566e-05, "loss": 0.0107, "step": 7525 }, { "epoch": 13.886426592797784, "grad_norm": 0.1800379902124405, "learning_rate": 2.2569e-05, "loss": 0.0117, "step": 7526 }, { "epoch": 13.888273314866112, "grad_norm": 0.3467258810997009, "learning_rate": 2.2572e-05, "loss": 0.0173, "step": 7527 }, { "epoch": 13.89012003693444, "grad_norm": 0.45100924372673035, "learning_rate": 2.2575e-05, "loss": 0.0207, "step": 7528 }, { "epoch": 13.89196675900277, "grad_norm": 0.4383341670036316, "learning_rate": 2.2578000000000003e-05, "loss": 0.0171, "step": 7529 }, { "epoch": 13.8938134810711, "grad_norm": 0.2968244254589081, "learning_rate": 2.2581000000000003e-05, "loss": 0.016, "step": 7530 }, { "epoch": 13.895660203139428, "grad_norm": 0.7290030121803284, "learning_rate": 2.2584000000000003e-05, "loss": 0.0268, "step": 7531 }, { "epoch": 13.897506925207756, "grad_norm": 0.5313127636909485, "learning_rate": 2.2587000000000002e-05, "loss": 0.0235, "step": 7532 }, { "epoch": 13.899353647276085, "grad_norm": 0.7194654941558838, "learning_rate": 2.2590000000000002e-05, "loss": 0.0204, "step": 7533 }, { "epoch": 13.901200369344414, "grad_norm": 0.8485562801361084, "learning_rate": 2.2593e-05, "loss": 0.013, "step": 7534 }, { "epoch": 13.903047091412743, 
"grad_norm": 0.4126611649990082, "learning_rate": 2.2596e-05, "loss": 0.0223, "step": 7535 }, { "epoch": 13.90489381348107, "grad_norm": 0.5421310663223267, "learning_rate": 2.2598999999999998e-05, "loss": 0.0301, "step": 7536 }, { "epoch": 13.9067405355494, "grad_norm": 0.5941007137298584, "learning_rate": 2.2601999999999998e-05, "loss": 0.027, "step": 7537 }, { "epoch": 13.908587257617729, "grad_norm": 0.5690109729766846, "learning_rate": 2.2604999999999998e-05, "loss": 0.035, "step": 7538 }, { "epoch": 13.910433979686058, "grad_norm": 0.3790777027606964, "learning_rate": 2.2608e-05, "loss": 0.0122, "step": 7539 }, { "epoch": 13.912280701754385, "grad_norm": 0.5342270731925964, "learning_rate": 2.2611e-05, "loss": 0.028, "step": 7540 }, { "epoch": 13.914127423822714, "grad_norm": 0.4844706952571869, "learning_rate": 2.2614e-05, "loss": 0.0173, "step": 7541 }, { "epoch": 13.915974145891044, "grad_norm": 0.4223809838294983, "learning_rate": 2.2617e-05, "loss": 0.0214, "step": 7542 }, { "epoch": 13.917820867959373, "grad_norm": 0.4551135301589966, "learning_rate": 2.262e-05, "loss": 0.0239, "step": 7543 }, { "epoch": 13.9196675900277, "grad_norm": 0.4099441468715668, "learning_rate": 2.2623e-05, "loss": 0.0251, "step": 7544 }, { "epoch": 13.92151431209603, "grad_norm": 0.31305715441703796, "learning_rate": 2.2626e-05, "loss": 0.0124, "step": 7545 }, { "epoch": 13.923361034164358, "grad_norm": 0.6895094513893127, "learning_rate": 2.2629e-05, "loss": 0.0132, "step": 7546 }, { "epoch": 13.925207756232687, "grad_norm": 1.553663730621338, "learning_rate": 2.2632e-05, "loss": 0.3124, "step": 7547 }, { "epoch": 13.927054478301017, "grad_norm": 0.7344269752502441, "learning_rate": 2.2635e-05, "loss": 0.1667, "step": 7548 }, { "epoch": 13.928901200369344, "grad_norm": 0.867383599281311, "learning_rate": 2.2638000000000002e-05, "loss": 0.1546, "step": 7549 }, { "epoch": 13.930747922437673, "grad_norm": 0.5955139398574829, "learning_rate": 2.2641000000000002e-05, "loss": 
0.1512, "step": 7550 }, { "epoch": 13.932594644506002, "grad_norm": 0.5191391110420227, "learning_rate": 2.2644000000000002e-05, "loss": 0.1248, "step": 7551 }, { "epoch": 13.93444136657433, "grad_norm": 0.9385023713111877, "learning_rate": 2.2647000000000002e-05, "loss": 0.1216, "step": 7552 }, { "epoch": 13.936288088642659, "grad_norm": 0.6964015960693359, "learning_rate": 2.265e-05, "loss": 0.1049, "step": 7553 }, { "epoch": 13.938134810710988, "grad_norm": 0.7118395566940308, "learning_rate": 2.2653e-05, "loss": 0.0931, "step": 7554 }, { "epoch": 13.939981532779317, "grad_norm": 0.630588948726654, "learning_rate": 2.2656e-05, "loss": 0.0625, "step": 7555 }, { "epoch": 13.941828254847646, "grad_norm": 0.5488806366920471, "learning_rate": 2.2659e-05, "loss": 0.1024, "step": 7556 }, { "epoch": 13.943674976915974, "grad_norm": 0.36687353253364563, "learning_rate": 2.2662e-05, "loss": 0.0363, "step": 7557 }, { "epoch": 13.945521698984303, "grad_norm": 0.7587217688560486, "learning_rate": 2.2665e-05, "loss": 0.0511, "step": 7558 }, { "epoch": 13.947368421052632, "grad_norm": 0.7604022026062012, "learning_rate": 2.2668e-05, "loss": 0.1023, "step": 7559 }, { "epoch": 13.949215143120961, "grad_norm": 0.37718528509140015, "learning_rate": 2.2671e-05, "loss": 0.0245, "step": 7560 }, { "epoch": 13.951061865189288, "grad_norm": 0.709337055683136, "learning_rate": 2.2674e-05, "loss": 0.0252, "step": 7561 }, { "epoch": 13.952908587257618, "grad_norm": 0.26128262281417847, "learning_rate": 2.2677e-05, "loss": 0.0245, "step": 7562 }, { "epoch": 13.954755309325947, "grad_norm": 0.4799002707004547, "learning_rate": 2.268e-05, "loss": 0.0175, "step": 7563 }, { "epoch": 13.956602031394276, "grad_norm": 0.3801664113998413, "learning_rate": 2.2683e-05, "loss": 0.0173, "step": 7564 }, { "epoch": 13.958448753462603, "grad_norm": 0.4788198173046112, "learning_rate": 2.2686e-05, "loss": 0.0225, "step": 7565 }, { "epoch": 13.960295475530932, "grad_norm": 0.3012051582336426, 
"learning_rate": 2.2689e-05, "loss": 0.0094, "step": 7566 }, { "epoch": 13.962142197599261, "grad_norm": 0.39352113008499146, "learning_rate": 2.2692e-05, "loss": 0.0176, "step": 7567 }, { "epoch": 13.96398891966759, "grad_norm": 0.3267962336540222, "learning_rate": 2.2695e-05, "loss": 0.0187, "step": 7568 }, { "epoch": 13.965835641735918, "grad_norm": 0.3652357757091522, "learning_rate": 2.2698000000000002e-05, "loss": 0.0163, "step": 7569 }, { "epoch": 13.967682363804247, "grad_norm": 0.29219409823417664, "learning_rate": 2.2701000000000002e-05, "loss": 0.0299, "step": 7570 }, { "epoch": 13.969529085872576, "grad_norm": 0.4231150150299072, "learning_rate": 2.2704e-05, "loss": 0.0173, "step": 7571 }, { "epoch": 13.971375807940905, "grad_norm": 0.8734736442565918, "learning_rate": 2.2707e-05, "loss": 0.0251, "step": 7572 }, { "epoch": 13.973222530009235, "grad_norm": 1.4855594635009766, "learning_rate": 2.271e-05, "loss": 0.0332, "step": 7573 }, { "epoch": 13.975069252077562, "grad_norm": 0.3971326947212219, "learning_rate": 2.2713e-05, "loss": 0.015, "step": 7574 }, { "epoch": 13.976915974145891, "grad_norm": 0.2669675052165985, "learning_rate": 2.2716e-05, "loss": 0.0083, "step": 7575 }, { "epoch": 13.97876269621422, "grad_norm": 0.33655616641044617, "learning_rate": 2.2719e-05, "loss": 0.0166, "step": 7576 }, { "epoch": 13.980609418282548, "grad_norm": 0.46512511372566223, "learning_rate": 2.2722e-05, "loss": 0.0172, "step": 7577 }, { "epoch": 13.982456140350877, "grad_norm": 0.7615096569061279, "learning_rate": 2.2725e-05, "loss": 0.0372, "step": 7578 }, { "epoch": 13.984302862419206, "grad_norm": 0.5171358585357666, "learning_rate": 2.2728000000000003e-05, "loss": 0.0172, "step": 7579 }, { "epoch": 13.986149584487535, "grad_norm": 0.46097612380981445, "learning_rate": 2.2731000000000003e-05, "loss": 0.0185, "step": 7580 }, { "epoch": 13.987996306555864, "grad_norm": 1.4962469339370728, "learning_rate": 2.2734000000000003e-05, "loss": 0.0318, "step": 7581 }, { 
"epoch": 13.989843028624191, "grad_norm": 0.5866150856018066, "learning_rate": 2.2737000000000003e-05, "loss": 0.0173, "step": 7582 }, { "epoch": 13.99168975069252, "grad_norm": 0.3140173554420471, "learning_rate": 2.274e-05, "loss": 0.0162, "step": 7583 }, { "epoch": 13.99353647276085, "grad_norm": 1.4404232501983643, "learning_rate": 2.2743e-05, "loss": 0.0216, "step": 7584 }, { "epoch": 13.995383194829179, "grad_norm": 0.34213533997535706, "learning_rate": 2.2746e-05, "loss": 0.0113, "step": 7585 }, { "epoch": 13.997229916897506, "grad_norm": 0.5488388538360596, "learning_rate": 2.2749e-05, "loss": 0.0235, "step": 7586 }, { "epoch": 13.999076638965835, "grad_norm": 0.4100989103317261, "learning_rate": 2.2752e-05, "loss": 0.0137, "step": 7587 }, { "epoch": 14.0, "grad_norm": 0.712295413017273, "learning_rate": 2.2754999999999998e-05, "loss": 0.0101, "step": 7588 }, { "epoch": 14.00184672206833, "grad_norm": 1.9446412324905396, "learning_rate": 2.2758e-05, "loss": 0.1777, "step": 7589 }, { "epoch": 14.003693444136658, "grad_norm": 2.1511576175689697, "learning_rate": 2.2761e-05, "loss": 0.1542, "step": 7590 }, { "epoch": 14.005540166204986, "grad_norm": 0.7647144794464111, "learning_rate": 2.2764e-05, "loss": 0.1752, "step": 7591 }, { "epoch": 14.007386888273315, "grad_norm": 1.0041519403457642, "learning_rate": 2.2767e-05, "loss": 0.1497, "step": 7592 }, { "epoch": 14.009233610341644, "grad_norm": 0.776584804058075, "learning_rate": 2.277e-05, "loss": 0.1156, "step": 7593 }, { "epoch": 14.011080332409973, "grad_norm": 4.055688381195068, "learning_rate": 2.2773e-05, "loss": 0.1211, "step": 7594 }, { "epoch": 14.0129270544783, "grad_norm": 0.8145771622657776, "learning_rate": 2.2776e-05, "loss": 0.1183, "step": 7595 }, { "epoch": 14.01477377654663, "grad_norm": 0.6137973070144653, "learning_rate": 2.2779e-05, "loss": 0.0915, "step": 7596 }, { "epoch": 14.016620498614959, "grad_norm": 0.6850339770317078, "learning_rate": 2.2782e-05, "loss": 0.1176, "step": 7597 }, { 
"epoch": 14.018467220683288, "grad_norm": 0.7108796238899231, "learning_rate": 2.2785e-05, "loss": 0.0925, "step": 7598 }, { "epoch": 14.020313942751615, "grad_norm": 0.8418132066726685, "learning_rate": 2.2788000000000003e-05, "loss": 0.1094, "step": 7599 }, { "epoch": 14.022160664819944, "grad_norm": 0.49468469619750977, "learning_rate": 2.2791000000000003e-05, "loss": 0.0733, "step": 7600 }, { "epoch": 14.024007386888274, "grad_norm": 0.4118848741054535, "learning_rate": 2.2794000000000002e-05, "loss": 0.0538, "step": 7601 }, { "epoch": 14.025854108956603, "grad_norm": 0.46933019161224365, "learning_rate": 2.2797000000000002e-05, "loss": 0.0378, "step": 7602 }, { "epoch": 14.02770083102493, "grad_norm": 0.514575183391571, "learning_rate": 2.2800000000000002e-05, "loss": 0.0528, "step": 7603 }, { "epoch": 14.02954755309326, "grad_norm": 0.41290318965911865, "learning_rate": 2.2803000000000002e-05, "loss": 0.0375, "step": 7604 }, { "epoch": 14.031394275161588, "grad_norm": 0.6624583005905151, "learning_rate": 2.2806e-05, "loss": 0.0243, "step": 7605 }, { "epoch": 14.033240997229917, "grad_norm": 0.4341171085834503, "learning_rate": 2.2809e-05, "loss": 0.0255, "step": 7606 }, { "epoch": 14.035087719298245, "grad_norm": 0.4499518871307373, "learning_rate": 2.2812e-05, "loss": 0.0431, "step": 7607 }, { "epoch": 14.036934441366574, "grad_norm": 2.0344090461730957, "learning_rate": 2.2814999999999998e-05, "loss": 0.0187, "step": 7608 }, { "epoch": 14.038781163434903, "grad_norm": 0.7375814914703369, "learning_rate": 2.2818e-05, "loss": 0.0182, "step": 7609 }, { "epoch": 14.040627885503232, "grad_norm": 0.45870843529701233, "learning_rate": 2.2821e-05, "loss": 0.0107, "step": 7610 }, { "epoch": 14.04247460757156, "grad_norm": 0.905041515827179, "learning_rate": 2.2824e-05, "loss": 0.0232, "step": 7611 }, { "epoch": 14.044321329639889, "grad_norm": 0.8704097867012024, "learning_rate": 2.2827e-05, "loss": 0.0249, "step": 7612 }, { "epoch": 14.046168051708218, "grad_norm": 
0.29793083667755127, "learning_rate": 2.283e-05, "loss": 0.0115, "step": 7613 }, { "epoch": 14.048014773776547, "grad_norm": 0.43679356575012207, "learning_rate": 2.2833e-05, "loss": 0.0217, "step": 7614 }, { "epoch": 14.049861495844876, "grad_norm": 1.4915803670883179, "learning_rate": 2.2836e-05, "loss": 0.0183, "step": 7615 }, { "epoch": 14.051708217913204, "grad_norm": 0.692267119884491, "learning_rate": 2.2839e-05, "loss": 0.0362, "step": 7616 }, { "epoch": 14.053554939981533, "grad_norm": 0.4739607274532318, "learning_rate": 2.2842e-05, "loss": 0.0232, "step": 7617 }, { "epoch": 14.055401662049862, "grad_norm": 0.25771304965019226, "learning_rate": 2.2845e-05, "loss": 0.0114, "step": 7618 }, { "epoch": 14.057248384118191, "grad_norm": 0.2940288484096527, "learning_rate": 2.2848000000000002e-05, "loss": 0.0177, "step": 7619 }, { "epoch": 14.059095106186518, "grad_norm": 0.6190711855888367, "learning_rate": 2.2851000000000002e-05, "loss": 0.0162, "step": 7620 }, { "epoch": 14.060941828254848, "grad_norm": 0.4986996352672577, "learning_rate": 2.2854000000000002e-05, "loss": 0.0123, "step": 7621 }, { "epoch": 14.062788550323177, "grad_norm": 0.4452718496322632, "learning_rate": 2.2857e-05, "loss": 0.0198, "step": 7622 }, { "epoch": 14.064635272391506, "grad_norm": 0.269513338804245, "learning_rate": 2.286e-05, "loss": 0.011, "step": 7623 }, { "epoch": 14.066481994459833, "grad_norm": 0.3024609684944153, "learning_rate": 2.2863e-05, "loss": 0.0112, "step": 7624 }, { "epoch": 14.068328716528162, "grad_norm": 0.9202260375022888, "learning_rate": 2.2866e-05, "loss": 0.0176, "step": 7625 }, { "epoch": 14.070175438596491, "grad_norm": 0.44851964712142944, "learning_rate": 2.2869e-05, "loss": 0.0417, "step": 7626 }, { "epoch": 14.07202216066482, "grad_norm": 0.5856828093528748, "learning_rate": 2.2872e-05, "loss": 0.0176, "step": 7627 }, { "epoch": 14.073868882733148, "grad_norm": 0.2961845099925995, "learning_rate": 2.2875e-05, "loss": 0.0104, "step": 7628 }, { 
"epoch": 14.075715604801477, "grad_norm": 0.34900930523872375, "learning_rate": 2.2878e-05, "loss": 0.0187, "step": 7629 }, { "epoch": 14.077562326869806, "grad_norm": 0.7918524742126465, "learning_rate": 2.2881000000000003e-05, "loss": 0.0217, "step": 7630 }, { "epoch": 14.079409048938135, "grad_norm": 0.7197925448417664, "learning_rate": 2.2884000000000003e-05, "loss": 0.031, "step": 7631 }, { "epoch": 14.081255771006463, "grad_norm": 0.2891060709953308, "learning_rate": 2.2887e-05, "loss": 0.0096, "step": 7632 }, { "epoch": 14.083102493074792, "grad_norm": 0.6610003709793091, "learning_rate": 2.289e-05, "loss": 0.021, "step": 7633 }, { "epoch": 14.084949215143121, "grad_norm": 0.6482753157615662, "learning_rate": 2.2893e-05, "loss": 0.0185, "step": 7634 }, { "epoch": 14.08679593721145, "grad_norm": 0.3223477602005005, "learning_rate": 2.2896e-05, "loss": 0.0109, "step": 7635 }, { "epoch": 14.088642659279778, "grad_norm": 0.5384102463722229, "learning_rate": 2.2899e-05, "loss": 0.01, "step": 7636 }, { "epoch": 14.090489381348107, "grad_norm": 0.6918799877166748, "learning_rate": 2.2902e-05, "loss": 0.0252, "step": 7637 }, { "epoch": 14.092336103416436, "grad_norm": 0.864923894405365, "learning_rate": 2.2905e-05, "loss": 0.0249, "step": 7638 }, { "epoch": 14.094182825484765, "grad_norm": 0.9709484577178955, "learning_rate": 2.2907999999999998e-05, "loss": 0.2236, "step": 7639 }, { "epoch": 14.096029547553094, "grad_norm": 0.9872021079063416, "learning_rate": 2.2911e-05, "loss": 0.2536, "step": 7640 }, { "epoch": 14.097876269621421, "grad_norm": 0.7489765882492065, "learning_rate": 2.2914e-05, "loss": 0.1795, "step": 7641 }, { "epoch": 14.09972299168975, "grad_norm": 0.7499869465827942, "learning_rate": 2.2917e-05, "loss": 0.1119, "step": 7642 }, { "epoch": 14.10156971375808, "grad_norm": 0.6760444045066833, "learning_rate": 2.292e-05, "loss": 0.1, "step": 7643 }, { "epoch": 14.103416435826409, "grad_norm": 1.1884748935699463, "learning_rate": 2.2923e-05, "loss": 
0.0716, "step": 7644 }, { "epoch": 14.105263157894736, "grad_norm": 0.6572530269622803, "learning_rate": 2.2926e-05, "loss": 0.0893, "step": 7645 }, { "epoch": 14.107109879963065, "grad_norm": 0.5504191517829895, "learning_rate": 2.2929e-05, "loss": 0.098, "step": 7646 }, { "epoch": 14.108956602031395, "grad_norm": 0.5794324278831482, "learning_rate": 2.2932e-05, "loss": 0.0736, "step": 7647 }, { "epoch": 14.110803324099724, "grad_norm": 0.39542296528816223, "learning_rate": 2.2935e-05, "loss": 0.0534, "step": 7648 }, { "epoch": 14.112650046168051, "grad_norm": 0.3459565043449402, "learning_rate": 2.2938e-05, "loss": 0.0387, "step": 7649 }, { "epoch": 14.11449676823638, "grad_norm": 0.9930928349494934, "learning_rate": 2.2941000000000003e-05, "loss": 0.047, "step": 7650 }, { "epoch": 14.11634349030471, "grad_norm": 0.4960794150829315, "learning_rate": 2.2944000000000003e-05, "loss": 0.0401, "step": 7651 }, { "epoch": 14.118190212373039, "grad_norm": 0.4362480342388153, "learning_rate": 2.2947000000000002e-05, "loss": 0.0395, "step": 7652 }, { "epoch": 14.120036934441366, "grad_norm": 0.43307340145111084, "learning_rate": 2.2950000000000002e-05, "loss": 0.0225, "step": 7653 }, { "epoch": 14.121883656509695, "grad_norm": 0.5992100834846497, "learning_rate": 2.2953000000000002e-05, "loss": 0.046, "step": 7654 }, { "epoch": 14.123730378578024, "grad_norm": 0.4235561490058899, "learning_rate": 2.2956000000000002e-05, "loss": 0.0209, "step": 7655 }, { "epoch": 14.125577100646353, "grad_norm": 0.3926125764846802, "learning_rate": 2.2959e-05, "loss": 0.0187, "step": 7656 }, { "epoch": 14.12742382271468, "grad_norm": 0.7339117527008057, "learning_rate": 2.2961999999999998e-05, "loss": 0.0595, "step": 7657 }, { "epoch": 14.12927054478301, "grad_norm": 0.4378303289413452, "learning_rate": 2.2964999999999998e-05, "loss": 0.0212, "step": 7658 }, { "epoch": 14.131117266851339, "grad_norm": 0.47204580903053284, "learning_rate": 2.2967999999999998e-05, "loss": 0.0407, "step": 7659 
}, { "epoch": 14.132963988919668, "grad_norm": 0.31966009736061096, "learning_rate": 2.2971e-05, "loss": 0.0175, "step": 7660 }, { "epoch": 14.134810710987995, "grad_norm": 0.3838869333267212, "learning_rate": 2.2974e-05, "loss": 0.0186, "step": 7661 }, { "epoch": 14.136657433056325, "grad_norm": 0.19909749925136566, "learning_rate": 2.2977e-05, "loss": 0.0114, "step": 7662 }, { "epoch": 14.138504155124654, "grad_norm": 0.3598874807357788, "learning_rate": 2.298e-05, "loss": 0.0343, "step": 7663 }, { "epoch": 14.140350877192983, "grad_norm": 0.18918880820274353, "learning_rate": 2.2983e-05, "loss": 0.0096, "step": 7664 }, { "epoch": 14.142197599261312, "grad_norm": 0.2890123724937439, "learning_rate": 2.2986e-05, "loss": 0.0129, "step": 7665 }, { "epoch": 14.14404432132964, "grad_norm": 0.27688318490982056, "learning_rate": 2.2989e-05, "loss": 0.0144, "step": 7666 }, { "epoch": 14.145891043397969, "grad_norm": 0.46568000316619873, "learning_rate": 2.2992e-05, "loss": 0.0704, "step": 7667 }, { "epoch": 14.147737765466298, "grad_norm": 0.5880398750305176, "learning_rate": 2.2995e-05, "loss": 0.0179, "step": 7668 }, { "epoch": 14.149584487534627, "grad_norm": 0.28892356157302856, "learning_rate": 2.2998e-05, "loss": 0.011, "step": 7669 }, { "epoch": 14.151431209602954, "grad_norm": 0.5236003994941711, "learning_rate": 2.3001000000000002e-05, "loss": 0.0365, "step": 7670 }, { "epoch": 14.153277931671283, "grad_norm": 0.17960208654403687, "learning_rate": 2.3004000000000002e-05, "loss": 0.0081, "step": 7671 }, { "epoch": 14.155124653739612, "grad_norm": 0.45658397674560547, "learning_rate": 2.3007000000000002e-05, "loss": 0.0189, "step": 7672 }, { "epoch": 14.156971375807942, "grad_norm": 0.3621665835380554, "learning_rate": 2.301e-05, "loss": 0.0202, "step": 7673 }, { "epoch": 14.158818097876269, "grad_norm": 0.30046194791793823, "learning_rate": 2.3013e-05, "loss": 0.0108, "step": 7674 }, { "epoch": 14.160664819944598, "grad_norm": 0.33425599336624146, 
"learning_rate": 2.3016e-05, "loss": 0.0173, "step": 7675 }, { "epoch": 14.162511542012927, "grad_norm": 0.20881663262844086, "learning_rate": 2.3019e-05, "loss": 0.0091, "step": 7676 }, { "epoch": 14.164358264081256, "grad_norm": 0.8621287941932678, "learning_rate": 2.3022e-05, "loss": 0.027, "step": 7677 }, { "epoch": 14.166204986149584, "grad_norm": 0.21208317577838898, "learning_rate": 2.3025e-05, "loss": 0.0079, "step": 7678 }, { "epoch": 14.168051708217913, "grad_norm": 1.173541784286499, "learning_rate": 2.3028e-05, "loss": 0.0178, "step": 7679 }, { "epoch": 14.169898430286242, "grad_norm": 0.2508636713027954, "learning_rate": 2.3031000000000004e-05, "loss": 0.0074, "step": 7680 }, { "epoch": 14.171745152354571, "grad_norm": 0.30045756697654724, "learning_rate": 2.3034e-05, "loss": 0.0083, "step": 7681 }, { "epoch": 14.173591874422899, "grad_norm": 0.38787662982940674, "learning_rate": 2.3037e-05, "loss": 0.0241, "step": 7682 }, { "epoch": 14.175438596491228, "grad_norm": 0.7570717930793762, "learning_rate": 2.304e-05, "loss": 0.0133, "step": 7683 }, { "epoch": 14.177285318559557, "grad_norm": 0.885297954082489, "learning_rate": 2.3043e-05, "loss": 0.0244, "step": 7684 }, { "epoch": 14.179132040627886, "grad_norm": 1.052407145500183, "learning_rate": 2.3046e-05, "loss": 0.023, "step": 7685 }, { "epoch": 14.180978762696213, "grad_norm": 0.8462616801261902, "learning_rate": 2.3049e-05, "loss": 0.038, "step": 7686 }, { "epoch": 14.182825484764543, "grad_norm": 0.41136690974235535, "learning_rate": 2.3052e-05, "loss": 0.0126, "step": 7687 }, { "epoch": 14.184672206832872, "grad_norm": 0.42739972472190857, "learning_rate": 2.3055e-05, "loss": 0.0146, "step": 7688 }, { "epoch": 14.1865189289012, "grad_norm": 0.8237746953964233, "learning_rate": 2.3058e-05, "loss": 0.2013, "step": 7689 }, { "epoch": 14.18836565096953, "grad_norm": 0.7207422852516174, "learning_rate": 2.3061e-05, "loss": 0.208, "step": 7690 }, { "epoch": 14.190212373037857, "grad_norm": 
0.9960768222808838, "learning_rate": 2.3064e-05, "loss": 0.1293, "step": 7691 }, { "epoch": 14.192059095106186, "grad_norm": 0.8917984962463379, "learning_rate": 2.3067e-05, "loss": 0.1429, "step": 7692 }, { "epoch": 14.193905817174516, "grad_norm": 0.5600876808166504, "learning_rate": 2.307e-05, "loss": 0.1572, "step": 7693 }, { "epoch": 14.195752539242845, "grad_norm": 0.4889839291572571, "learning_rate": 2.3073e-05, "loss": 0.1045, "step": 7694 }, { "epoch": 14.197599261311172, "grad_norm": 0.44030889868736267, "learning_rate": 2.3076e-05, "loss": 0.0875, "step": 7695 }, { "epoch": 14.199445983379501, "grad_norm": 0.48708662390708923, "learning_rate": 2.3079e-05, "loss": 0.0865, "step": 7696 }, { "epoch": 14.20129270544783, "grad_norm": 0.5878287553787231, "learning_rate": 2.3082e-05, "loss": 0.0882, "step": 7697 }, { "epoch": 14.20313942751616, "grad_norm": 0.6096134185791016, "learning_rate": 2.3085e-05, "loss": 0.1273, "step": 7698 }, { "epoch": 14.204986149584487, "grad_norm": 0.41568949818611145, "learning_rate": 2.3088e-05, "loss": 0.0412, "step": 7699 }, { "epoch": 14.206832871652816, "grad_norm": 0.8116822242736816, "learning_rate": 2.3091000000000003e-05, "loss": 0.0527, "step": 7700 }, { "epoch": 14.208679593721145, "grad_norm": 0.5746693015098572, "learning_rate": 2.3094000000000003e-05, "loss": 0.0963, "step": 7701 }, { "epoch": 14.210526315789474, "grad_norm": 0.4817805290222168, "learning_rate": 2.3097000000000003e-05, "loss": 0.0589, "step": 7702 }, { "epoch": 14.212373037857802, "grad_norm": 0.4487169086933136, "learning_rate": 2.3100000000000002e-05, "loss": 0.0365, "step": 7703 }, { "epoch": 14.21421975992613, "grad_norm": 0.5302636027336121, "learning_rate": 2.3103000000000002e-05, "loss": 0.0307, "step": 7704 }, { "epoch": 14.21606648199446, "grad_norm": 0.22351866960525513, "learning_rate": 2.3106000000000002e-05, "loss": 0.0133, "step": 7705 }, { "epoch": 14.21791320406279, "grad_norm": 0.41186782717704773, "learning_rate": 2.3109e-05, 
"loss": 0.0186, "step": 7706 }, { "epoch": 14.219759926131117, "grad_norm": 0.40214699506759644, "learning_rate": 2.3111999999999998e-05, "loss": 0.0177, "step": 7707 }, { "epoch": 14.221606648199446, "grad_norm": 0.24294708669185638, "learning_rate": 2.3114999999999998e-05, "loss": 0.0137, "step": 7708 }, { "epoch": 14.223453370267775, "grad_norm": 0.5617495775222778, "learning_rate": 2.3117999999999998e-05, "loss": 0.0234, "step": 7709 }, { "epoch": 14.225300092336104, "grad_norm": 0.8240264654159546, "learning_rate": 2.3121e-05, "loss": 0.0257, "step": 7710 }, { "epoch": 14.227146814404431, "grad_norm": 0.5336347818374634, "learning_rate": 2.3124e-05, "loss": 0.039, "step": 7711 }, { "epoch": 14.22899353647276, "grad_norm": 0.34149596095085144, "learning_rate": 2.3127e-05, "loss": 0.0169, "step": 7712 }, { "epoch": 14.23084025854109, "grad_norm": 0.24113301932811737, "learning_rate": 2.313e-05, "loss": 0.0133, "step": 7713 }, { "epoch": 14.232686980609419, "grad_norm": 0.2770822048187256, "learning_rate": 2.3133e-05, "loss": 0.0076, "step": 7714 }, { "epoch": 14.234533702677748, "grad_norm": 0.37785083055496216, "learning_rate": 2.3136e-05, "loss": 0.0303, "step": 7715 }, { "epoch": 14.236380424746075, "grad_norm": 0.36604055762290955, "learning_rate": 2.3139e-05, "loss": 0.0237, "step": 7716 }, { "epoch": 14.238227146814404, "grad_norm": 0.316237211227417, "learning_rate": 2.3142e-05, "loss": 0.0194, "step": 7717 }, { "epoch": 14.240073868882734, "grad_norm": 0.3500533103942871, "learning_rate": 2.3145e-05, "loss": 0.0121, "step": 7718 }, { "epoch": 14.241920590951063, "grad_norm": 0.3890174329280853, "learning_rate": 2.3148e-05, "loss": 0.0092, "step": 7719 }, { "epoch": 14.24376731301939, "grad_norm": 1.027275562286377, "learning_rate": 2.3151000000000002e-05, "loss": 0.0269, "step": 7720 }, { "epoch": 14.24561403508772, "grad_norm": 0.4223162531852722, "learning_rate": 2.3154000000000002e-05, "loss": 0.014, "step": 7721 }, { "epoch": 14.247460757156048, 
"grad_norm": 0.37757349014282227, "learning_rate": 2.3157000000000002e-05, "loss": 0.0101, "step": 7722 }, { "epoch": 14.249307479224377, "grad_norm": 0.34131377935409546, "learning_rate": 2.3160000000000002e-05, "loss": 0.0143, "step": 7723 }, { "epoch": 14.251154201292705, "grad_norm": 0.4734109044075012, "learning_rate": 2.3163e-05, "loss": 0.017, "step": 7724 }, { "epoch": 14.253000923361034, "grad_norm": 0.5335391759872437, "learning_rate": 2.3166e-05, "loss": 0.016, "step": 7725 }, { "epoch": 14.254847645429363, "grad_norm": 0.25241535902023315, "learning_rate": 2.3169e-05, "loss": 0.0092, "step": 7726 }, { "epoch": 14.256694367497692, "grad_norm": 0.6295625567436218, "learning_rate": 2.3172e-05, "loss": 0.0169, "step": 7727 }, { "epoch": 14.25854108956602, "grad_norm": 0.7412222623825073, "learning_rate": 2.3175e-05, "loss": 0.0148, "step": 7728 }, { "epoch": 14.260387811634349, "grad_norm": 0.3130491375923157, "learning_rate": 2.3178e-05, "loss": 0.0114, "step": 7729 }, { "epoch": 14.262234533702678, "grad_norm": 0.31834471225738525, "learning_rate": 2.3181000000000004e-05, "loss": 0.0106, "step": 7730 }, { "epoch": 14.264081255771007, "grad_norm": 1.08151376247406, "learning_rate": 2.3184e-05, "loss": 0.0213, "step": 7731 }, { "epoch": 14.265927977839334, "grad_norm": 0.7608094215393066, "learning_rate": 2.3187e-05, "loss": 0.0191, "step": 7732 }, { "epoch": 14.267774699907664, "grad_norm": 0.23599159717559814, "learning_rate": 2.319e-05, "loss": 0.0148, "step": 7733 }, { "epoch": 14.269621421975993, "grad_norm": 0.6872748136520386, "learning_rate": 2.3193e-05, "loss": 0.0133, "step": 7734 }, { "epoch": 14.271468144044322, "grad_norm": 0.7782183885574341, "learning_rate": 2.3196e-05, "loss": 0.0184, "step": 7735 }, { "epoch": 14.27331486611265, "grad_norm": 0.5043119788169861, "learning_rate": 2.3199e-05, "loss": 0.016, "step": 7736 }, { "epoch": 14.275161588180978, "grad_norm": 0.7329913973808289, "learning_rate": 2.3202e-05, "loss": 0.0162, "step": 7737 
}, { "epoch": 14.277008310249307, "grad_norm": 0.37354522943496704, "learning_rate": 2.3205e-05, "loss": 0.0131, "step": 7738 }, { "epoch": 14.278855032317637, "grad_norm": 1.0684969425201416, "learning_rate": 2.3208e-05, "loss": 0.2945, "step": 7739 }, { "epoch": 14.280701754385966, "grad_norm": 0.5377521514892578, "learning_rate": 2.3211000000000002e-05, "loss": 0.1278, "step": 7740 }, { "epoch": 14.282548476454293, "grad_norm": 0.5380083322525024, "learning_rate": 2.3214000000000002e-05, "loss": 0.1373, "step": 7741 }, { "epoch": 14.284395198522622, "grad_norm": 1.1396440267562866, "learning_rate": 2.3217e-05, "loss": 0.1496, "step": 7742 }, { "epoch": 14.286241920590951, "grad_norm": 0.8189786672592163, "learning_rate": 2.322e-05, "loss": 0.1442, "step": 7743 }, { "epoch": 14.28808864265928, "grad_norm": 0.5815137028694153, "learning_rate": 2.3223e-05, "loss": 0.1651, "step": 7744 }, { "epoch": 14.289935364727608, "grad_norm": 0.5112689137458801, "learning_rate": 2.3226e-05, "loss": 0.0993, "step": 7745 }, { "epoch": 14.291782086795937, "grad_norm": 0.8791669607162476, "learning_rate": 2.3229e-05, "loss": 0.1104, "step": 7746 }, { "epoch": 14.293628808864266, "grad_norm": 0.9186159372329712, "learning_rate": 2.3232e-05, "loss": 0.0836, "step": 7747 }, { "epoch": 14.295475530932595, "grad_norm": 0.6723326444625854, "learning_rate": 2.3235e-05, "loss": 0.0736, "step": 7748 }, { "epoch": 14.297322253000923, "grad_norm": 0.5478449463844299, "learning_rate": 2.3238e-05, "loss": 0.1027, "step": 7749 }, { "epoch": 14.299168975069252, "grad_norm": 0.38028210401535034, "learning_rate": 2.3241000000000003e-05, "loss": 0.0377, "step": 7750 }, { "epoch": 14.301015697137581, "grad_norm": 0.47234466671943665, "learning_rate": 2.3244000000000003e-05, "loss": 0.0761, "step": 7751 }, { "epoch": 14.30286241920591, "grad_norm": 0.47043994069099426, "learning_rate": 2.3247000000000003e-05, "loss": 0.0569, "step": 7752 }, { "epoch": 14.304709141274238, "grad_norm": 
0.44927072525024414, "learning_rate": 2.3250000000000003e-05, "loss": 0.0366, "step": 7753 }, { "epoch": 14.306555863342567, "grad_norm": 0.3382532596588135, "learning_rate": 2.3253000000000003e-05, "loss": 0.0298, "step": 7754 }, { "epoch": 14.308402585410896, "grad_norm": 0.30027419328689575, "learning_rate": 2.3256e-05, "loss": 0.0199, "step": 7755 }, { "epoch": 14.310249307479225, "grad_norm": 0.44675812125205994, "learning_rate": 2.3259e-05, "loss": 0.0302, "step": 7756 }, { "epoch": 14.312096029547552, "grad_norm": 0.803776204586029, "learning_rate": 2.3262e-05, "loss": 0.0209, "step": 7757 }, { "epoch": 14.313942751615881, "grad_norm": 0.32607215642929077, "learning_rate": 2.3265e-05, "loss": 0.0135, "step": 7758 }, { "epoch": 14.31578947368421, "grad_norm": 0.30982378125190735, "learning_rate": 2.3267999999999998e-05, "loss": 0.0143, "step": 7759 }, { "epoch": 14.31763619575254, "grad_norm": 0.38955262303352356, "learning_rate": 2.3270999999999998e-05, "loss": 0.0189, "step": 7760 }, { "epoch": 14.319482917820867, "grad_norm": 0.34186357259750366, "learning_rate": 2.3274e-05, "loss": 0.0128, "step": 7761 }, { "epoch": 14.321329639889196, "grad_norm": 0.29279205203056335, "learning_rate": 2.3277e-05, "loss": 0.0154, "step": 7762 }, { "epoch": 14.323176361957525, "grad_norm": 0.37621551752090454, "learning_rate": 2.328e-05, "loss": 0.012, "step": 7763 }, { "epoch": 14.325023084025855, "grad_norm": 0.4087648093700409, "learning_rate": 2.3283e-05, "loss": 0.0186, "step": 7764 }, { "epoch": 14.326869806094184, "grad_norm": 0.303737998008728, "learning_rate": 2.3286e-05, "loss": 0.0136, "step": 7765 }, { "epoch": 14.328716528162511, "grad_norm": 0.5038633942604065, "learning_rate": 2.3289e-05, "loss": 0.016, "step": 7766 }, { "epoch": 14.33056325023084, "grad_norm": 0.8184044361114502, "learning_rate": 2.3292e-05, "loss": 0.0283, "step": 7767 }, { "epoch": 14.33240997229917, "grad_norm": 1.2842833995819092, "learning_rate": 2.3295e-05, "loss": 0.0478, "step": 
7768 }, { "epoch": 14.334256694367498, "grad_norm": 0.36860495805740356, "learning_rate": 2.3298e-05, "loss": 0.0141, "step": 7769 }, { "epoch": 14.336103416435826, "grad_norm": 0.6347278952598572, "learning_rate": 2.3301e-05, "loss": 0.0214, "step": 7770 }, { "epoch": 14.337950138504155, "grad_norm": 0.37546491622924805, "learning_rate": 2.3304000000000003e-05, "loss": 0.0141, "step": 7771 }, { "epoch": 14.339796860572484, "grad_norm": 0.39249086380004883, "learning_rate": 2.3307000000000002e-05, "loss": 0.0153, "step": 7772 }, { "epoch": 14.341643582640813, "grad_norm": 0.6554223895072937, "learning_rate": 2.3310000000000002e-05, "loss": 0.026, "step": 7773 }, { "epoch": 14.34349030470914, "grad_norm": 1.0025122165679932, "learning_rate": 2.3313000000000002e-05, "loss": 0.0111, "step": 7774 }, { "epoch": 14.34533702677747, "grad_norm": 0.6032886505126953, "learning_rate": 2.3316000000000002e-05, "loss": 0.0159, "step": 7775 }, { "epoch": 14.347183748845799, "grad_norm": 0.31834331154823303, "learning_rate": 2.3319e-05, "loss": 0.0152, "step": 7776 }, { "epoch": 14.349030470914128, "grad_norm": 0.7992693781852722, "learning_rate": 2.3322e-05, "loss": 0.0303, "step": 7777 }, { "epoch": 14.350877192982455, "grad_norm": 0.3116844892501831, "learning_rate": 2.3325e-05, "loss": 0.0156, "step": 7778 }, { "epoch": 14.352723915050785, "grad_norm": 0.2716032564640045, "learning_rate": 2.3328e-05, "loss": 0.0093, "step": 7779 }, { "epoch": 14.354570637119114, "grad_norm": 0.2736084759235382, "learning_rate": 2.3330999999999997e-05, "loss": 0.0083, "step": 7780 }, { "epoch": 14.356417359187443, "grad_norm": 0.6578453183174133, "learning_rate": 2.3334e-05, "loss": 0.0212, "step": 7781 }, { "epoch": 14.35826408125577, "grad_norm": 0.33356401324272156, "learning_rate": 2.3337e-05, "loss": 0.0104, "step": 7782 }, { "epoch": 14.3601108033241, "grad_norm": 0.37421756982803345, "learning_rate": 2.334e-05, "loss": 0.016, "step": 7783 }, { "epoch": 14.361957525392429, "grad_norm": 
0.48051688075065613, "learning_rate": 2.3343e-05, "loss": 0.0171, "step": 7784 }, { "epoch": 14.363804247460758, "grad_norm": 0.5618257522583008, "learning_rate": 2.3346e-05, "loss": 0.0261, "step": 7785 }, { "epoch": 14.365650969529085, "grad_norm": 0.3171500861644745, "learning_rate": 2.3349e-05, "loss": 0.01, "step": 7786 }, { "epoch": 14.367497691597414, "grad_norm": 0.531436562538147, "learning_rate": 2.3352e-05, "loss": 0.0131, "step": 7787 }, { "epoch": 14.369344413665743, "grad_norm": 0.9091717600822449, "learning_rate": 2.3355e-05, "loss": 0.0163, "step": 7788 }, { "epoch": 14.371191135734072, "grad_norm": 0.6591691374778748, "learning_rate": 2.3358e-05, "loss": 0.191, "step": 7789 }, { "epoch": 14.373037857802402, "grad_norm": 0.5781044363975525, "learning_rate": 2.3361e-05, "loss": 0.1545, "step": 7790 }, { "epoch": 14.374884579870729, "grad_norm": 0.7448709607124329, "learning_rate": 2.3364000000000002e-05, "loss": 0.174, "step": 7791 }, { "epoch": 14.376731301939058, "grad_norm": 0.6972495913505554, "learning_rate": 2.3367000000000002e-05, "loss": 0.2, "step": 7792 }, { "epoch": 14.378578024007387, "grad_norm": 0.7102606296539307, "learning_rate": 2.337e-05, "loss": 0.1773, "step": 7793 }, { "epoch": 14.380424746075716, "grad_norm": 0.4949325621128082, "learning_rate": 2.3373e-05, "loss": 0.1013, "step": 7794 }, { "epoch": 14.382271468144044, "grad_norm": 1.218247652053833, "learning_rate": 2.3376e-05, "loss": 0.1204, "step": 7795 }, { "epoch": 14.384118190212373, "grad_norm": 0.5108643770217896, "learning_rate": 2.3379e-05, "loss": 0.0815, "step": 7796 }, { "epoch": 14.385964912280702, "grad_norm": 0.569215714931488, "learning_rate": 2.3382e-05, "loss": 0.0851, "step": 7797 }, { "epoch": 14.387811634349031, "grad_norm": 0.6716434359550476, "learning_rate": 2.3385e-05, "loss": 0.0753, "step": 7798 }, { "epoch": 14.389658356417359, "grad_norm": 0.32701218128204346, "learning_rate": 2.3388e-05, "loss": 0.0476, "step": 7799 }, { "epoch": 
14.391505078485688, "grad_norm": 0.6348007321357727, "learning_rate": 2.3391e-05, "loss": 0.0478, "step": 7800 }, { "epoch": 14.393351800554017, "grad_norm": 0.423272967338562, "learning_rate": 2.3394000000000003e-05, "loss": 0.0638, "step": 7801 }, { "epoch": 14.395198522622346, "grad_norm": 0.4461739659309387, "learning_rate": 2.3397000000000003e-05, "loss": 0.0546, "step": 7802 }, { "epoch": 14.397045244690673, "grad_norm": 0.42833179235458374, "learning_rate": 2.3400000000000003e-05, "loss": 0.0362, "step": 7803 }, { "epoch": 14.398891966759003, "grad_norm": 0.32603225111961365, "learning_rate": 2.3403e-05, "loss": 0.0235, "step": 7804 }, { "epoch": 14.400738688827332, "grad_norm": 0.3583817780017853, "learning_rate": 2.3406e-05, "loss": 0.0227, "step": 7805 }, { "epoch": 14.40258541089566, "grad_norm": 0.3128528594970703, "learning_rate": 2.3409e-05, "loss": 0.0217, "step": 7806 }, { "epoch": 14.404432132963988, "grad_norm": 0.37281113862991333, "learning_rate": 2.3412e-05, "loss": 0.0135, "step": 7807 }, { "epoch": 14.406278855032317, "grad_norm": 0.5349627137184143, "learning_rate": 2.3415e-05, "loss": 0.0169, "step": 7808 }, { "epoch": 14.408125577100646, "grad_norm": 0.5006557703018188, "learning_rate": 2.3418e-05, "loss": 0.0434, "step": 7809 }, { "epoch": 14.409972299168976, "grad_norm": 0.5111393332481384, "learning_rate": 2.3420999999999998e-05, "loss": 0.0145, "step": 7810 }, { "epoch": 14.411819021237303, "grad_norm": 0.3418116867542267, "learning_rate": 2.3424e-05, "loss": 0.0188, "step": 7811 }, { "epoch": 14.413665743305632, "grad_norm": 0.33915090560913086, "learning_rate": 2.3427e-05, "loss": 0.0185, "step": 7812 }, { "epoch": 14.415512465373961, "grad_norm": 0.36737731099128723, "learning_rate": 2.343e-05, "loss": 0.0212, "step": 7813 }, { "epoch": 14.41735918744229, "grad_norm": 0.14595548808574677, "learning_rate": 2.3433e-05, "loss": 0.0051, "step": 7814 }, { "epoch": 14.41920590951062, "grad_norm": 0.3603948950767517, "learning_rate": 
2.3436e-05, "loss": 0.0158, "step": 7815 }, { "epoch": 14.421052631578947, "grad_norm": 0.35886791348457336, "learning_rate": 2.3439e-05, "loss": 0.011, "step": 7816 }, { "epoch": 14.422899353647276, "grad_norm": 0.4424641728401184, "learning_rate": 2.3442e-05, "loss": 0.0139, "step": 7817 }, { "epoch": 14.424746075715605, "grad_norm": 0.21980373561382294, "learning_rate": 2.3445e-05, "loss": 0.0076, "step": 7818 }, { "epoch": 14.426592797783934, "grad_norm": 0.2293301224708557, "learning_rate": 2.3448e-05, "loss": 0.0113, "step": 7819 }, { "epoch": 14.428439519852262, "grad_norm": 0.4105096161365509, "learning_rate": 2.3451e-05, "loss": 0.0114, "step": 7820 }, { "epoch": 14.43028624192059, "grad_norm": 0.5419213175773621, "learning_rate": 2.3454000000000003e-05, "loss": 0.0182, "step": 7821 }, { "epoch": 14.43213296398892, "grad_norm": 0.21369367837905884, "learning_rate": 2.3457000000000003e-05, "loss": 0.0086, "step": 7822 }, { "epoch": 14.43397968605725, "grad_norm": 0.39795318245887756, "learning_rate": 2.3460000000000002e-05, "loss": 0.0152, "step": 7823 }, { "epoch": 14.435826408125576, "grad_norm": 0.6262432932853699, "learning_rate": 2.3463000000000002e-05, "loss": 0.0244, "step": 7824 }, { "epoch": 14.437673130193906, "grad_norm": 0.5282998085021973, "learning_rate": 2.3466000000000002e-05, "loss": 0.0165, "step": 7825 }, { "epoch": 14.439519852262235, "grad_norm": 0.7346487045288086, "learning_rate": 2.3469000000000002e-05, "loss": 0.0245, "step": 7826 }, { "epoch": 14.441366574330564, "grad_norm": 0.45807477831840515, "learning_rate": 2.3472e-05, "loss": 0.0092, "step": 7827 }, { "epoch": 14.443213296398891, "grad_norm": 0.5049607753753662, "learning_rate": 2.3475e-05, "loss": 0.0169, "step": 7828 }, { "epoch": 14.44506001846722, "grad_norm": 0.5809155702590942, "learning_rate": 2.3477999999999998e-05, "loss": 0.0192, "step": 7829 }, { "epoch": 14.44690674053555, "grad_norm": 0.45647743344306946, "learning_rate": 2.3480999999999998e-05, "loss": 0.0137, 
"step": 7830 }, { "epoch": 14.448753462603879, "grad_norm": 0.5045316815376282, "learning_rate": 2.3484e-05, "loss": 0.0242, "step": 7831 }, { "epoch": 14.450600184672206, "grad_norm": 0.742444634437561, "learning_rate": 2.3487e-05, "loss": 0.0121, "step": 7832 }, { "epoch": 14.452446906740535, "grad_norm": 0.8437967896461487, "learning_rate": 2.349e-05, "loss": 0.0217, "step": 7833 }, { "epoch": 14.454293628808864, "grad_norm": 0.2819928228855133, "learning_rate": 2.3493e-05, "loss": 0.0103, "step": 7834 }, { "epoch": 14.456140350877194, "grad_norm": 0.2498871237039566, "learning_rate": 2.3496e-05, "loss": 0.0074, "step": 7835 }, { "epoch": 14.45798707294552, "grad_norm": 0.6369275450706482, "learning_rate": 2.3499e-05, "loss": 0.025, "step": 7836 }, { "epoch": 14.45983379501385, "grad_norm": 0.5697141885757446, "learning_rate": 2.3502e-05, "loss": 0.0207, "step": 7837 }, { "epoch": 14.46168051708218, "grad_norm": 0.7965555191040039, "learning_rate": 2.3505e-05, "loss": 0.0286, "step": 7838 }, { "epoch": 14.463527239150508, "grad_norm": 1.235569715499878, "learning_rate": 2.3508e-05, "loss": 0.2713, "step": 7839 }, { "epoch": 14.465373961218837, "grad_norm": 0.6825758218765259, "learning_rate": 2.3511e-05, "loss": 0.1966, "step": 7840 }, { "epoch": 14.467220683287165, "grad_norm": 0.6262037754058838, "learning_rate": 2.3514000000000002e-05, "loss": 0.1178, "step": 7841 }, { "epoch": 14.469067405355494, "grad_norm": 1.8128679990768433, "learning_rate": 2.3517000000000002e-05, "loss": 0.1999, "step": 7842 }, { "epoch": 14.470914127423823, "grad_norm": 0.5639660954475403, "learning_rate": 2.3520000000000002e-05, "loss": 0.1532, "step": 7843 }, { "epoch": 14.472760849492152, "grad_norm": 0.5506031513214111, "learning_rate": 2.3523e-05, "loss": 0.1075, "step": 7844 }, { "epoch": 14.47460757156048, "grad_norm": 0.49700742959976196, "learning_rate": 2.3526e-05, "loss": 0.1151, "step": 7845 }, { "epoch": 14.476454293628809, "grad_norm": 0.5941647887229919, 
"learning_rate": 2.3529e-05, "loss": 0.115, "step": 7846 }, { "epoch": 14.478301015697138, "grad_norm": 0.6988394260406494, "learning_rate": 2.3532e-05, "loss": 0.0892, "step": 7847 }, { "epoch": 14.480147737765467, "grad_norm": 0.5593818426132202, "learning_rate": 2.3535e-05, "loss": 0.0557, "step": 7848 }, { "epoch": 14.481994459833794, "grad_norm": 0.41339749097824097, "learning_rate": 2.3538e-05, "loss": 0.0509, "step": 7849 }, { "epoch": 14.483841181902124, "grad_norm": 1.1100635528564453, "learning_rate": 2.3541e-05, "loss": 0.0631, "step": 7850 }, { "epoch": 14.485687903970453, "grad_norm": 0.7645638585090637, "learning_rate": 2.3544000000000004e-05, "loss": 0.1023, "step": 7851 }, { "epoch": 14.487534626038782, "grad_norm": 0.7224724888801575, "learning_rate": 2.3547000000000003e-05, "loss": 0.0816, "step": 7852 }, { "epoch": 14.48938134810711, "grad_norm": 0.26063552498817444, "learning_rate": 2.3550000000000003e-05, "loss": 0.0259, "step": 7853 }, { "epoch": 14.491228070175438, "grad_norm": 0.6297289729118347, "learning_rate": 2.3553e-05, "loss": 0.0648, "step": 7854 }, { "epoch": 14.493074792243767, "grad_norm": 0.5482573509216309, "learning_rate": 2.3556e-05, "loss": 0.0513, "step": 7855 }, { "epoch": 14.494921514312097, "grad_norm": 0.5147392153739929, "learning_rate": 2.3559e-05, "loss": 0.0398, "step": 7856 }, { "epoch": 14.496768236380424, "grad_norm": 0.398120641708374, "learning_rate": 2.3562e-05, "loss": 0.0416, "step": 7857 }, { "epoch": 14.498614958448753, "grad_norm": 0.3588913679122925, "learning_rate": 2.3565e-05, "loss": 0.0191, "step": 7858 }, { "epoch": 14.500461680517082, "grad_norm": 0.28668269515037537, "learning_rate": 2.3568e-05, "loss": 0.0169, "step": 7859 }, { "epoch": 14.502308402585411, "grad_norm": 0.3392992913722992, "learning_rate": 2.3571e-05, "loss": 0.0221, "step": 7860 }, { "epoch": 14.504155124653739, "grad_norm": 0.2852858901023865, "learning_rate": 2.3574e-05, "loss": 0.0154, "step": 7861 }, { "epoch": 
14.506001846722068, "grad_norm": 0.27077481150627136, "learning_rate": 2.3577e-05, "loss": 0.013, "step": 7862 }, { "epoch": 14.507848568790397, "grad_norm": 0.3104352355003357, "learning_rate": 2.358e-05, "loss": 0.0121, "step": 7863 }, { "epoch": 14.509695290858726, "grad_norm": 0.32454049587249756, "learning_rate": 2.3583e-05, "loss": 0.0218, "step": 7864 }, { "epoch": 14.511542012927055, "grad_norm": 0.7309545278549194, "learning_rate": 2.3586e-05, "loss": 0.0169, "step": 7865 }, { "epoch": 14.513388734995383, "grad_norm": 0.20314842462539673, "learning_rate": 2.3589e-05, "loss": 0.0071, "step": 7866 }, { "epoch": 14.515235457063712, "grad_norm": 0.6824448108673096, "learning_rate": 2.3592e-05, "loss": 0.0474, "step": 7867 }, { "epoch": 14.517082179132041, "grad_norm": 0.6702181696891785, "learning_rate": 2.3595e-05, "loss": 0.0143, "step": 7868 }, { "epoch": 14.51892890120037, "grad_norm": 0.5272572636604309, "learning_rate": 2.3598e-05, "loss": 0.0121, "step": 7869 }, { "epoch": 14.520775623268698, "grad_norm": 0.5622138381004333, "learning_rate": 2.3601e-05, "loss": 0.0488, "step": 7870 }, { "epoch": 14.522622345337027, "grad_norm": 0.26477548480033875, "learning_rate": 2.3604000000000003e-05, "loss": 0.0078, "step": 7871 }, { "epoch": 14.524469067405356, "grad_norm": 0.45857515931129456, "learning_rate": 2.3607000000000003e-05, "loss": 0.0196, "step": 7872 }, { "epoch": 14.526315789473685, "grad_norm": 0.6599807739257812, "learning_rate": 2.3610000000000003e-05, "loss": 0.0134, "step": 7873 }, { "epoch": 14.528162511542012, "grad_norm": 0.3410775363445282, "learning_rate": 2.3613000000000002e-05, "loss": 0.0098, "step": 7874 }, { "epoch": 14.530009233610341, "grad_norm": 0.5473353266716003, "learning_rate": 2.3616000000000002e-05, "loss": 0.0162, "step": 7875 }, { "epoch": 14.53185595567867, "grad_norm": 0.626075267791748, "learning_rate": 2.3619000000000002e-05, "loss": 0.0172, "step": 7876 }, { "epoch": 14.533702677747, "grad_norm": 0.507577121257782, 
"learning_rate": 2.3622000000000002e-05, "loss": 0.0161, "step": 7877 }, { "epoch": 14.535549399815327, "grad_norm": 0.44902390241622925, "learning_rate": 2.3624999999999998e-05, "loss": 0.0278, "step": 7878 }, { "epoch": 14.537396121883656, "grad_norm": 0.4249652922153473, "learning_rate": 2.3627999999999998e-05, "loss": 0.0154, "step": 7879 }, { "epoch": 14.539242843951985, "grad_norm": 0.5330266356468201, "learning_rate": 2.3630999999999998e-05, "loss": 0.0079, "step": 7880 }, { "epoch": 14.541089566020315, "grad_norm": 0.5065855979919434, "learning_rate": 2.3633999999999998e-05, "loss": 0.0167, "step": 7881 }, { "epoch": 14.542936288088642, "grad_norm": 0.4754953384399414, "learning_rate": 2.3637e-05, "loss": 0.0159, "step": 7882 }, { "epoch": 14.544783010156971, "grad_norm": 0.4979807138442993, "learning_rate": 2.364e-05, "loss": 0.0181, "step": 7883 }, { "epoch": 14.5466297322253, "grad_norm": 0.5053218007087708, "learning_rate": 2.3643e-05, "loss": 0.0133, "step": 7884 }, { "epoch": 14.54847645429363, "grad_norm": 0.4692944586277008, "learning_rate": 2.3646e-05, "loss": 0.0168, "step": 7885 }, { "epoch": 14.550323176361957, "grad_norm": 0.521899938583374, "learning_rate": 2.3649e-05, "loss": 0.0204, "step": 7886 }, { "epoch": 14.552169898430286, "grad_norm": 0.8429057002067566, "learning_rate": 2.3652e-05, "loss": 0.0214, "step": 7887 }, { "epoch": 14.554016620498615, "grad_norm": 0.37914028763771057, "learning_rate": 2.3655e-05, "loss": 0.0169, "step": 7888 }, { "epoch": 14.555863342566944, "grad_norm": 0.636502742767334, "learning_rate": 2.3658e-05, "loss": 0.184, "step": 7889 }, { "epoch": 14.557710064635273, "grad_norm": 0.7152822017669678, "learning_rate": 2.3661e-05, "loss": 0.1724, "step": 7890 }, { "epoch": 14.5595567867036, "grad_norm": 0.49054843187332153, "learning_rate": 2.3664e-05, "loss": 0.1158, "step": 7891 }, { "epoch": 14.56140350877193, "grad_norm": 0.5772072076797485, "learning_rate": 2.3667000000000002e-05, "loss": 0.1338, "step": 7892 
}, { "epoch": 14.563250230840259, "grad_norm": 0.5550270676612854, "learning_rate": 2.3670000000000002e-05, "loss": 0.1215, "step": 7893 }, { "epoch": 14.565096952908588, "grad_norm": 0.5168792605400085, "learning_rate": 2.3673000000000002e-05, "loss": 0.0839, "step": 7894 }, { "epoch": 14.566943674976915, "grad_norm": 0.47602367401123047, "learning_rate": 2.3676e-05, "loss": 0.0906, "step": 7895 }, { "epoch": 14.568790397045245, "grad_norm": 0.6356981992721558, "learning_rate": 2.3679e-05, "loss": 0.0955, "step": 7896 }, { "epoch": 14.570637119113574, "grad_norm": 0.5250639319419861, "learning_rate": 2.3682e-05, "loss": 0.0971, "step": 7897 }, { "epoch": 14.572483841181903, "grad_norm": 0.41396352648735046, "learning_rate": 2.3685e-05, "loss": 0.0668, "step": 7898 }, { "epoch": 14.57433056325023, "grad_norm": 0.5052721500396729, "learning_rate": 2.3688e-05, "loss": 0.0602, "step": 7899 }, { "epoch": 14.57617728531856, "grad_norm": 0.4817100763320923, "learning_rate": 2.3691e-05, "loss": 0.1045, "step": 7900 }, { "epoch": 14.578024007386889, "grad_norm": 0.33338767290115356, "learning_rate": 2.3694e-05, "loss": 0.0351, "step": 7901 }, { "epoch": 14.579870729455218, "grad_norm": 0.41364586353302, "learning_rate": 2.3697000000000004e-05, "loss": 0.0441, "step": 7902 }, { "epoch": 14.581717451523545, "grad_norm": 0.47825878858566284, "learning_rate": 2.37e-05, "loss": 0.0475, "step": 7903 }, { "epoch": 14.583564173591874, "grad_norm": 0.5705379247665405, "learning_rate": 2.3703e-05, "loss": 0.0455, "step": 7904 }, { "epoch": 14.585410895660203, "grad_norm": 0.4665312170982361, "learning_rate": 2.3706e-05, "loss": 0.0301, "step": 7905 }, { "epoch": 14.587257617728532, "grad_norm": 0.2677137553691864, "learning_rate": 2.3709e-05, "loss": 0.0145, "step": 7906 }, { "epoch": 14.58910433979686, "grad_norm": 0.34597480297088623, "learning_rate": 2.3712e-05, "loss": 0.0105, "step": 7907 }, { "epoch": 14.590951061865189, "grad_norm": 0.346442848443985, "learning_rate": 
2.3715e-05, "loss": 0.0527, "step": 7908 }, { "epoch": 14.592797783933518, "grad_norm": 0.4659012258052826, "learning_rate": 2.3718e-05, "loss": 0.0168, "step": 7909 }, { "epoch": 14.594644506001847, "grad_norm": 0.3821612000465393, "learning_rate": 2.3721e-05, "loss": 0.0266, "step": 7910 }, { "epoch": 14.596491228070175, "grad_norm": 0.4945525825023651, "learning_rate": 2.3724e-05, "loss": 0.0283, "step": 7911 }, { "epoch": 14.598337950138504, "grad_norm": 0.37642475962638855, "learning_rate": 2.3727000000000002e-05, "loss": 0.0213, "step": 7912 }, { "epoch": 14.600184672206833, "grad_norm": 0.5657537579536438, "learning_rate": 2.373e-05, "loss": 0.0288, "step": 7913 }, { "epoch": 14.602031394275162, "grad_norm": 0.5660479664802551, "learning_rate": 2.3733e-05, "loss": 0.0282, "step": 7914 }, { "epoch": 14.603878116343491, "grad_norm": 0.4243585467338562, "learning_rate": 2.3736e-05, "loss": 0.0186, "step": 7915 }, { "epoch": 14.605724838411819, "grad_norm": 0.3085576295852661, "learning_rate": 2.3739e-05, "loss": 0.0117, "step": 7916 }, { "epoch": 14.607571560480148, "grad_norm": 0.223404198884964, "learning_rate": 2.3742e-05, "loss": 0.0106, "step": 7917 }, { "epoch": 14.609418282548477, "grad_norm": 0.4205861985683441, "learning_rate": 2.3745e-05, "loss": 0.0199, "step": 7918 }, { "epoch": 14.611265004616806, "grad_norm": 0.35712581872940063, "learning_rate": 2.3748e-05, "loss": 0.0156, "step": 7919 }, { "epoch": 14.613111726685133, "grad_norm": 0.37940770387649536, "learning_rate": 2.3751e-05, "loss": 0.0313, "step": 7920 }, { "epoch": 14.614958448753463, "grad_norm": 0.314486026763916, "learning_rate": 2.3754e-05, "loss": 0.0168, "step": 7921 }, { "epoch": 14.616805170821792, "grad_norm": 0.37937620282173157, "learning_rate": 2.3757000000000003e-05, "loss": 0.0139, "step": 7922 }, { "epoch": 14.61865189289012, "grad_norm": 0.36704617738723755, "learning_rate": 2.3760000000000003e-05, "loss": 0.0252, "step": 7923 }, { "epoch": 14.620498614958448, "grad_norm": 
0.25925007462501526, "learning_rate": 2.3763000000000003e-05, "loss": 0.0096, "step": 7924 }, { "epoch": 14.622345337026777, "grad_norm": 0.38265901803970337, "learning_rate": 2.3766000000000003e-05, "loss": 0.0115, "step": 7925 }, { "epoch": 14.624192059095106, "grad_norm": 0.5699060559272766, "learning_rate": 2.3769000000000002e-05, "loss": 0.0143, "step": 7926 }, { "epoch": 14.626038781163436, "grad_norm": 0.369829386472702, "learning_rate": 2.3772e-05, "loss": 0.0146, "step": 7927 }, { "epoch": 14.627885503231763, "grad_norm": 0.5048293471336365, "learning_rate": 2.3775e-05, "loss": 0.0138, "step": 7928 }, { "epoch": 14.629732225300092, "grad_norm": 0.26562240719795227, "learning_rate": 2.3778e-05, "loss": 0.0103, "step": 7929 }, { "epoch": 14.631578947368421, "grad_norm": 0.28638893365859985, "learning_rate": 2.3780999999999998e-05, "loss": 0.0119, "step": 7930 }, { "epoch": 14.63342566943675, "grad_norm": 0.5839011073112488, "learning_rate": 2.3783999999999998e-05, "loss": 0.0164, "step": 7931 }, { "epoch": 14.635272391505078, "grad_norm": 0.3103630840778351, "learning_rate": 2.3787e-05, "loss": 0.0116, "step": 7932 }, { "epoch": 14.637119113573407, "grad_norm": 1.3576741218566895, "learning_rate": 2.379e-05, "loss": 0.014, "step": 7933 }, { "epoch": 14.638965835641736, "grad_norm": 1.0917797088623047, "learning_rate": 2.3793e-05, "loss": 0.0227, "step": 7934 }, { "epoch": 14.640812557710065, "grad_norm": 0.32580968737602234, "learning_rate": 2.3796e-05, "loss": 0.0073, "step": 7935 }, { "epoch": 14.642659279778393, "grad_norm": 0.285483181476593, "learning_rate": 2.3799e-05, "loss": 0.0104, "step": 7936 }, { "epoch": 14.644506001846722, "grad_norm": 0.40575113892555237, "learning_rate": 2.3802e-05, "loss": 0.0133, "step": 7937 }, { "epoch": 14.64635272391505, "grad_norm": 0.3636173605918884, "learning_rate": 2.3805e-05, "loss": 0.0177, "step": 7938 }, { "epoch": 14.64819944598338, "grad_norm": 0.8212683200836182, "learning_rate": 2.3808e-05, "loss": 0.2281, 
"step": 7939 }, { "epoch": 14.65004616805171, "grad_norm": 0.9566912651062012, "learning_rate": 2.3811e-05, "loss": 0.2628, "step": 7940 }, { "epoch": 14.651892890120036, "grad_norm": 0.6159496903419495, "learning_rate": 2.3814e-05, "loss": 0.1634, "step": 7941 }, { "epoch": 14.653739612188366, "grad_norm": 0.5956009030342102, "learning_rate": 2.3817000000000003e-05, "loss": 0.1371, "step": 7942 }, { "epoch": 14.655586334256695, "grad_norm": 0.7215802669525146, "learning_rate": 2.3820000000000002e-05, "loss": 0.1136, "step": 7943 }, { "epoch": 14.657433056325024, "grad_norm": 0.41611161828041077, "learning_rate": 2.3823000000000002e-05, "loss": 0.0957, "step": 7944 }, { "epoch": 14.659279778393351, "grad_norm": 0.5038338899612427, "learning_rate": 2.3826000000000002e-05, "loss": 0.0958, "step": 7945 }, { "epoch": 14.66112650046168, "grad_norm": 0.5414931774139404, "learning_rate": 2.3829000000000002e-05, "loss": 0.1228, "step": 7946 }, { "epoch": 14.66297322253001, "grad_norm": 0.5237676501274109, "learning_rate": 2.3832e-05, "loss": 0.0741, "step": 7947 }, { "epoch": 14.664819944598339, "grad_norm": 1.1172302961349487, "learning_rate": 2.3835e-05, "loss": 0.1129, "step": 7948 }, { "epoch": 14.666666666666666, "grad_norm": 0.6457228660583496, "learning_rate": 2.3838e-05, "loss": 0.0944, "step": 7949 }, { "epoch": 14.668513388734995, "grad_norm": 0.576670229434967, "learning_rate": 2.3841e-05, "loss": 0.0673, "step": 7950 }, { "epoch": 14.670360110803324, "grad_norm": 0.3982366919517517, "learning_rate": 2.3844e-05, "loss": 0.0644, "step": 7951 }, { "epoch": 14.672206832871654, "grad_norm": 1.198979139328003, "learning_rate": 2.3847e-05, "loss": 0.0439, "step": 7952 }, { "epoch": 14.67405355493998, "grad_norm": 0.3780513107776642, "learning_rate": 2.385e-05, "loss": 0.046, "step": 7953 }, { "epoch": 14.67590027700831, "grad_norm": 0.3405984938144684, "learning_rate": 2.3853e-05, "loss": 0.0152, "step": 7954 }, { "epoch": 14.67774699907664, "grad_norm": 
0.3067670166492462, "learning_rate": 2.3856e-05, "loss": 0.0197, "step": 7955 }, { "epoch": 14.679593721144968, "grad_norm": 0.6061209440231323, "learning_rate": 2.3859e-05, "loss": 0.0246, "step": 7956 }, { "epoch": 14.681440443213296, "grad_norm": 0.4019205868244171, "learning_rate": 2.3862e-05, "loss": 0.0198, "step": 7957 }, { "epoch": 14.683287165281625, "grad_norm": 0.49981725215911865, "learning_rate": 2.3865e-05, "loss": 0.0207, "step": 7958 }, { "epoch": 14.685133887349954, "grad_norm": 0.3462250828742981, "learning_rate": 2.3868e-05, "loss": 0.0238, "step": 7959 }, { "epoch": 14.686980609418283, "grad_norm": 0.381491094827652, "learning_rate": 2.3871e-05, "loss": 0.0231, "step": 7960 }, { "epoch": 14.68882733148661, "grad_norm": 0.42520666122436523, "learning_rate": 2.3874e-05, "loss": 0.0258, "step": 7961 }, { "epoch": 14.69067405355494, "grad_norm": 0.417881578207016, "learning_rate": 2.3877000000000002e-05, "loss": 0.0287, "step": 7962 }, { "epoch": 14.692520775623269, "grad_norm": 0.43953225016593933, "learning_rate": 2.3880000000000002e-05, "loss": 0.0199, "step": 7963 }, { "epoch": 14.694367497691598, "grad_norm": 0.3966647982597351, "learning_rate": 2.3883e-05, "loss": 0.0178, "step": 7964 }, { "epoch": 14.696214219759927, "grad_norm": 0.9427505731582642, "learning_rate": 2.3886e-05, "loss": 0.036, "step": 7965 }, { "epoch": 14.698060941828254, "grad_norm": 0.461514949798584, "learning_rate": 2.3889e-05, "loss": 0.019, "step": 7966 }, { "epoch": 14.699907663896584, "grad_norm": 0.3639925718307495, "learning_rate": 2.3892e-05, "loss": 0.0103, "step": 7967 }, { "epoch": 14.701754385964913, "grad_norm": 0.3159520924091339, "learning_rate": 2.3895e-05, "loss": 0.0201, "step": 7968 }, { "epoch": 14.703601108033242, "grad_norm": 0.44609788060188293, "learning_rate": 2.3898e-05, "loss": 0.0196, "step": 7969 }, { "epoch": 14.70544783010157, "grad_norm": 0.4839285612106323, "learning_rate": 2.3901e-05, "loss": 0.0253, "step": 7970 }, { "epoch": 
14.707294552169898, "grad_norm": 0.2293621152639389, "learning_rate": 2.3904e-05, "loss": 0.0074, "step": 7971 }, { "epoch": 14.709141274238227, "grad_norm": 0.26273688673973083, "learning_rate": 2.3907000000000003e-05, "loss": 0.0133, "step": 7972 }, { "epoch": 14.710987996306557, "grad_norm": 0.24619637429714203, "learning_rate": 2.3910000000000003e-05, "loss": 0.0125, "step": 7973 }, { "epoch": 14.712834718374884, "grad_norm": 0.8715478777885437, "learning_rate": 2.3913000000000003e-05, "loss": 0.0371, "step": 7974 }, { "epoch": 14.714681440443213, "grad_norm": 0.334750235080719, "learning_rate": 2.3916000000000003e-05, "loss": 0.0129, "step": 7975 }, { "epoch": 14.716528162511542, "grad_norm": 0.27692723274230957, "learning_rate": 2.3919e-05, "loss": 0.0161, "step": 7976 }, { "epoch": 14.718374884579871, "grad_norm": 0.5121621489524841, "learning_rate": 2.3922e-05, "loss": 0.0272, "step": 7977 }, { "epoch": 14.720221606648199, "grad_norm": 0.46436607837677, "learning_rate": 2.3925e-05, "loss": 0.0216, "step": 7978 }, { "epoch": 14.722068328716528, "grad_norm": 0.683923065662384, "learning_rate": 2.3928e-05, "loss": 0.0215, "step": 7979 }, { "epoch": 14.723915050784857, "grad_norm": 0.283103346824646, "learning_rate": 2.3931e-05, "loss": 0.0199, "step": 7980 }, { "epoch": 14.725761772853186, "grad_norm": 0.31182169914245605, "learning_rate": 2.3933999999999998e-05, "loss": 0.0139, "step": 7981 }, { "epoch": 14.727608494921514, "grad_norm": 0.36937060952186584, "learning_rate": 2.3937e-05, "loss": 0.0166, "step": 7982 }, { "epoch": 14.729455216989843, "grad_norm": 0.44678324460983276, "learning_rate": 2.394e-05, "loss": 0.0181, "step": 7983 }, { "epoch": 14.731301939058172, "grad_norm": 0.41206908226013184, "learning_rate": 2.3943e-05, "loss": 0.0191, "step": 7984 }, { "epoch": 14.733148661126501, "grad_norm": 0.5686429142951965, "learning_rate": 2.3946e-05, "loss": 0.0221, "step": 7985 }, { "epoch": 14.734995383194828, "grad_norm": 0.5409742593765259, 
"learning_rate": 2.3949e-05, "loss": 0.0241, "step": 7986 }, { "epoch": 14.736842105263158, "grad_norm": 0.6201287508010864, "learning_rate": 2.3952e-05, "loss": 0.0248, "step": 7987 }, { "epoch": 14.738688827331487, "grad_norm": 2.362740993499756, "learning_rate": 2.3955e-05, "loss": 0.0808, "step": 7988 }, { "epoch": 14.740535549399816, "grad_norm": 0.7609465718269348, "learning_rate": 2.3958e-05, "loss": 0.2072, "step": 7989 }, { "epoch": 14.742382271468145, "grad_norm": 0.48303288221359253, "learning_rate": 2.3961e-05, "loss": 0.1277, "step": 7990 }, { "epoch": 14.744228993536472, "grad_norm": 0.8768311142921448, "learning_rate": 2.3964e-05, "loss": 0.1706, "step": 7991 }, { "epoch": 14.746075715604801, "grad_norm": 0.6291162967681885, "learning_rate": 2.3967000000000003e-05, "loss": 0.1365, "step": 7992 }, { "epoch": 14.74792243767313, "grad_norm": 0.5517061948776245, "learning_rate": 2.3970000000000003e-05, "loss": 0.1203, "step": 7993 }, { "epoch": 14.749769159741458, "grad_norm": 0.5301569104194641, "learning_rate": 2.3973000000000002e-05, "loss": 0.13, "step": 7994 }, { "epoch": 14.751615881809787, "grad_norm": 0.4237849712371826, "learning_rate": 2.3976000000000002e-05, "loss": 0.0786, "step": 7995 }, { "epoch": 14.753462603878116, "grad_norm": 0.7277824282646179, "learning_rate": 2.3979000000000002e-05, "loss": 0.0815, "step": 7996 }, { "epoch": 14.755309325946445, "grad_norm": 0.4558088779449463, "learning_rate": 2.3982000000000002e-05, "loss": 0.0836, "step": 7997 }, { "epoch": 14.757156048014775, "grad_norm": 0.3935530185699463, "learning_rate": 2.3985e-05, "loss": 0.0501, "step": 7998 }, { "epoch": 14.759002770083102, "grad_norm": 0.48821765184402466, "learning_rate": 2.3988e-05, "loss": 0.0783, "step": 7999 }, { "epoch": 14.760849492151431, "grad_norm": 0.8362130522727966, "learning_rate": 2.3991e-05, "loss": 0.0436, "step": 8000 }, { "epoch": 14.760849492151431, "eval_cer": 0.1098841278481997, "eval_loss": 0.31252992153167725, "eval_runtime": 
16.3111, "eval_samples_per_second": 18.638, "eval_steps_per_second": 0.613, "eval_wer": 0.38468917881811204, "step": 8000 }, { "epoch": 14.76269621421976, "grad_norm": 0.43701910972595215, "learning_rate": 2.3993999999999998e-05, "loss": 0.0576, "step": 8001 }, { "epoch": 14.76454293628809, "grad_norm": 0.7060490250587463, "learning_rate": 2.3997e-05, "loss": 0.0971, "step": 8002 }, { "epoch": 14.766389658356417, "grad_norm": 0.3635128140449524, "learning_rate": 2.4e-05, "loss": 0.0305, "step": 8003 }, { "epoch": 14.768236380424746, "grad_norm": 0.3958315849304199, "learning_rate": 2.4003e-05, "loss": 0.0637, "step": 8004 }, { "epoch": 14.770083102493075, "grad_norm": 0.3374282121658325, "learning_rate": 2.4006e-05, "loss": 0.0186, "step": 8005 }, { "epoch": 14.771929824561404, "grad_norm": 0.3075411915779114, "learning_rate": 2.4009e-05, "loss": 0.0136, "step": 8006 }, { "epoch": 14.773776546629731, "grad_norm": 0.6738809943199158, "learning_rate": 2.4012e-05, "loss": 0.0266, "step": 8007 }, { "epoch": 14.77562326869806, "grad_norm": 0.1647556722164154, "learning_rate": 2.4015e-05, "loss": 0.012, "step": 8008 }, { "epoch": 14.77746999076639, "grad_norm": 0.3219056725502014, "learning_rate": 2.4018e-05, "loss": 0.0206, "step": 8009 }, { "epoch": 14.779316712834719, "grad_norm": 0.341482937335968, "learning_rate": 2.4021e-05, "loss": 0.0144, "step": 8010 }, { "epoch": 14.781163434903046, "grad_norm": 0.24657687544822693, "learning_rate": 2.4024e-05, "loss": 0.0141, "step": 8011 }, { "epoch": 14.783010156971375, "grad_norm": 0.4916836619377136, "learning_rate": 2.4027e-05, "loss": 0.0137, "step": 8012 }, { "epoch": 14.784856879039705, "grad_norm": 1.4418329000473022, "learning_rate": 2.4030000000000002e-05, "loss": 0.0184, "step": 8013 }, { "epoch": 14.786703601108034, "grad_norm": 0.2875135540962219, "learning_rate": 2.4033000000000002e-05, "loss": 0.0108, "step": 8014 }, { "epoch": 14.788550323176363, "grad_norm": 0.3412550389766693, "learning_rate": 2.4036e-05, 
"loss": 0.0152, "step": 8015 }, { "epoch": 14.79039704524469, "grad_norm": 0.5497750043869019, "learning_rate": 2.4039e-05, "loss": 0.0202, "step": 8016 }, { "epoch": 14.79224376731302, "grad_norm": 0.32832539081573486, "learning_rate": 2.4042e-05, "loss": 0.0145, "step": 8017 }, { "epoch": 14.794090489381349, "grad_norm": 0.2167942225933075, "learning_rate": 2.4045e-05, "loss": 0.0151, "step": 8018 }, { "epoch": 14.795937211449676, "grad_norm": 0.36289235949516296, "learning_rate": 2.4048e-05, "loss": 0.0123, "step": 8019 }, { "epoch": 14.797783933518005, "grad_norm": 0.3763875961303711, "learning_rate": 2.4051e-05, "loss": 0.0097, "step": 8020 }, { "epoch": 14.799630655586334, "grad_norm": 0.5189347863197327, "learning_rate": 2.4054e-05, "loss": 0.0125, "step": 8021 }, { "epoch": 14.801477377654663, "grad_norm": 0.2630421817302704, "learning_rate": 2.4057e-05, "loss": 0.0184, "step": 8022 }, { "epoch": 14.803324099722992, "grad_norm": 0.6870790123939514, "learning_rate": 2.4060000000000003e-05, "loss": 0.0161, "step": 8023 }, { "epoch": 14.80517082179132, "grad_norm": 0.65828537940979, "learning_rate": 2.4063000000000003e-05, "loss": 0.0193, "step": 8024 }, { "epoch": 14.807017543859649, "grad_norm": 0.34641534090042114, "learning_rate": 2.4066000000000003e-05, "loss": 0.0123, "step": 8025 }, { "epoch": 14.808864265927978, "grad_norm": 0.24606692790985107, "learning_rate": 2.4069e-05, "loss": 0.0111, "step": 8026 }, { "epoch": 14.810710987996307, "grad_norm": 0.5390129685401917, "learning_rate": 2.4072e-05, "loss": 0.0243, "step": 8027 }, { "epoch": 14.812557710064635, "grad_norm": 0.5146270394325256, "learning_rate": 2.4075e-05, "loss": 0.021, "step": 8028 }, { "epoch": 14.814404432132964, "grad_norm": 0.6502516865730286, "learning_rate": 2.4078e-05, "loss": 0.0277, "step": 8029 }, { "epoch": 14.816251154201293, "grad_norm": 0.4489519000053406, "learning_rate": 2.4081e-05, "loss": 0.0205, "step": 8030 }, { "epoch": 14.818097876269622, "grad_norm": 
0.9653266072273254, "learning_rate": 2.4084e-05, "loss": 0.0226, "step": 8031 }, { "epoch": 14.81994459833795, "grad_norm": 0.6530783772468567, "learning_rate": 2.4086999999999998e-05, "loss": 0.0192, "step": 8032 }, { "epoch": 14.821791320406279, "grad_norm": 0.5835822224617004, "learning_rate": 2.409e-05, "loss": 0.0247, "step": 8033 }, { "epoch": 14.823638042474608, "grad_norm": 0.6050820350646973, "learning_rate": 2.4093e-05, "loss": 0.031, "step": 8034 }, { "epoch": 14.825484764542937, "grad_norm": 0.5342754125595093, "learning_rate": 2.4096e-05, "loss": 0.0263, "step": 8035 }, { "epoch": 14.827331486611264, "grad_norm": 0.792512059211731, "learning_rate": 2.4099e-05, "loss": 0.0365, "step": 8036 }, { "epoch": 14.829178208679593, "grad_norm": 0.5477274656295776, "learning_rate": 2.4102e-05, "loss": 0.0271, "step": 8037 }, { "epoch": 14.831024930747922, "grad_norm": 0.6358621120452881, "learning_rate": 2.4105e-05, "loss": 0.0362, "step": 8038 }, { "epoch": 14.832871652816252, "grad_norm": 0.8861833810806274, "learning_rate": 2.4108e-05, "loss": 0.1902, "step": 8039 }, { "epoch": 14.83471837488458, "grad_norm": 0.7224254608154297, "learning_rate": 2.4111e-05, "loss": 0.1701, "step": 8040 }, { "epoch": 14.836565096952908, "grad_norm": 0.6825087070465088, "learning_rate": 2.4114e-05, "loss": 0.1845, "step": 8041 }, { "epoch": 14.838411819021237, "grad_norm": 0.6417450904846191, "learning_rate": 2.4117e-05, "loss": 0.1565, "step": 8042 }, { "epoch": 14.840258541089566, "grad_norm": 0.884933352470398, "learning_rate": 2.4120000000000003e-05, "loss": 0.1841, "step": 8043 }, { "epoch": 14.842105263157894, "grad_norm": 0.6482926607131958, "learning_rate": 2.4123000000000003e-05, "loss": 0.0914, "step": 8044 }, { "epoch": 14.843951985226223, "grad_norm": 0.5458600521087646, "learning_rate": 2.4126000000000002e-05, "loss": 0.0925, "step": 8045 }, { "epoch": 14.845798707294552, "grad_norm": 0.7142218947410583, "learning_rate": 2.4129000000000002e-05, "loss": 0.0796, 
"step": 8046 }, { "epoch": 14.847645429362881, "grad_norm": 0.5783588290214539, "learning_rate": 2.4132000000000002e-05, "loss": 0.0939, "step": 8047 }, { "epoch": 14.84949215143121, "grad_norm": 0.6058603525161743, "learning_rate": 2.4135000000000002e-05, "loss": 0.0602, "step": 8048 }, { "epoch": 14.851338873499538, "grad_norm": 0.743747889995575, "learning_rate": 2.4138e-05, "loss": 0.0614, "step": 8049 }, { "epoch": 14.853185595567867, "grad_norm": 0.6152564287185669, "learning_rate": 2.4140999999999998e-05, "loss": 0.0833, "step": 8050 }, { "epoch": 14.855032317636196, "grad_norm": 0.5742655992507935, "learning_rate": 2.4143999999999998e-05, "loss": 0.0513, "step": 8051 }, { "epoch": 14.856879039704525, "grad_norm": 0.29754096269607544, "learning_rate": 2.4146999999999998e-05, "loss": 0.0243, "step": 8052 }, { "epoch": 14.858725761772853, "grad_norm": 0.3988395929336548, "learning_rate": 2.415e-05, "loss": 0.0397, "step": 8053 }, { "epoch": 14.860572483841182, "grad_norm": 0.6038896441459656, "learning_rate": 2.4153e-05, "loss": 0.0843, "step": 8054 }, { "epoch": 14.86241920590951, "grad_norm": 0.4392443597316742, "learning_rate": 2.4156e-05, "loss": 0.0268, "step": 8055 }, { "epoch": 14.86426592797784, "grad_norm": 0.35345086455345154, "learning_rate": 2.4159e-05, "loss": 0.0246, "step": 8056 }, { "epoch": 14.866112650046167, "grad_norm": 0.40793344378471375, "learning_rate": 2.4162e-05, "loss": 0.0501, "step": 8057 }, { "epoch": 14.867959372114496, "grad_norm": 0.3084394633769989, "learning_rate": 2.4165e-05, "loss": 0.0521, "step": 8058 }, { "epoch": 14.869806094182826, "grad_norm": 0.26706284284591675, "learning_rate": 2.4168e-05, "loss": 0.0158, "step": 8059 }, { "epoch": 14.871652816251155, "grad_norm": 0.19276896119117737, "learning_rate": 2.4171e-05, "loss": 0.0191, "step": 8060 }, { "epoch": 14.873499538319482, "grad_norm": 0.3058687746524811, "learning_rate": 2.4174e-05, "loss": 0.0155, "step": 8061 }, { "epoch": 14.875346260387811, "grad_norm": 
0.35725489258766174, "learning_rate": 2.4177e-05, "loss": 0.0197, "step": 8062 }, { "epoch": 14.87719298245614, "grad_norm": 0.26820647716522217, "learning_rate": 2.4180000000000002e-05, "loss": 0.0093, "step": 8063 }, { "epoch": 14.87903970452447, "grad_norm": 0.2957378625869751, "learning_rate": 2.4183000000000002e-05, "loss": 0.0145, "step": 8064 }, { "epoch": 14.880886426592799, "grad_norm": 0.3385465145111084, "learning_rate": 2.4186000000000002e-05, "loss": 0.0141, "step": 8065 }, { "epoch": 14.882733148661126, "grad_norm": 0.40991589426994324, "learning_rate": 2.4189e-05, "loss": 0.0143, "step": 8066 }, { "epoch": 14.884579870729455, "grad_norm": 0.43255510926246643, "learning_rate": 2.4192e-05, "loss": 0.0063, "step": 8067 }, { "epoch": 14.886426592797784, "grad_norm": 0.34434038400650024, "learning_rate": 2.4195e-05, "loss": 0.0127, "step": 8068 }, { "epoch": 14.888273314866112, "grad_norm": 0.506460964679718, "learning_rate": 2.4198e-05, "loss": 0.0153, "step": 8069 }, { "epoch": 14.89012003693444, "grad_norm": 0.7858155369758606, "learning_rate": 2.4201e-05, "loss": 0.0255, "step": 8070 }, { "epoch": 14.89196675900277, "grad_norm": 0.48727911710739136, "learning_rate": 2.4204e-05, "loss": 0.012, "step": 8071 }, { "epoch": 14.8938134810711, "grad_norm": 0.26856595277786255, "learning_rate": 2.4207e-05, "loss": 0.0118, "step": 8072 }, { "epoch": 14.895660203139428, "grad_norm": 0.4287573993206024, "learning_rate": 2.4210000000000004e-05, "loss": 0.017, "step": 8073 }, { "epoch": 14.897506925207756, "grad_norm": 0.3845706284046173, "learning_rate": 2.4213000000000003e-05, "loss": 0.0061, "step": 8074 }, { "epoch": 14.899353647276085, "grad_norm": 0.9846393465995789, "learning_rate": 2.4216e-05, "loss": 0.0209, "step": 8075 }, { "epoch": 14.901200369344414, "grad_norm": 0.48798713088035583, "learning_rate": 2.4219e-05, "loss": 0.037, "step": 8076 }, { "epoch": 14.903047091412743, "grad_norm": 0.658402144908905, "learning_rate": 2.4222e-05, "loss": 0.0244, 
"step": 8077 }, { "epoch": 14.90489381348107, "grad_norm": 0.47966620326042175, "learning_rate": 2.4225e-05, "loss": 0.0245, "step": 8078 }, { "epoch": 14.9067405355494, "grad_norm": 0.7510401606559753, "learning_rate": 2.4228e-05, "loss": 0.0347, "step": 8079 }, { "epoch": 14.908587257617729, "grad_norm": 0.24847644567489624, "learning_rate": 2.4231e-05, "loss": 0.0092, "step": 8080 }, { "epoch": 14.910433979686058, "grad_norm": 0.6264692544937134, "learning_rate": 2.4234e-05, "loss": 0.0249, "step": 8081 }, { "epoch": 14.912280701754385, "grad_norm": 0.5359593033790588, "learning_rate": 2.4237e-05, "loss": 0.0277, "step": 8082 }, { "epoch": 14.914127423822714, "grad_norm": 0.4312509000301361, "learning_rate": 2.4240000000000002e-05, "loss": 0.0192, "step": 8083 }, { "epoch": 14.915974145891044, "grad_norm": 0.44593143463134766, "learning_rate": 2.4243e-05, "loss": 0.014, "step": 8084 }, { "epoch": 14.917820867959373, "grad_norm": 0.4584122598171234, "learning_rate": 2.4246e-05, "loss": 0.0323, "step": 8085 }, { "epoch": 14.9196675900277, "grad_norm": 0.5289592742919922, "learning_rate": 2.4249e-05, "loss": 0.0281, "step": 8086 }, { "epoch": 14.92151431209603, "grad_norm": 0.32490670680999756, "learning_rate": 2.4252e-05, "loss": 0.0157, "step": 8087 }, { "epoch": 14.923361034164358, "grad_norm": 0.43771255016326904, "learning_rate": 2.4255e-05, "loss": 0.028, "step": 8088 }, { "epoch": 14.925207756232687, "grad_norm": 1.3917237520217896, "learning_rate": 2.4258e-05, "loss": 0.2859, "step": 8089 }, { "epoch": 14.927054478301017, "grad_norm": 0.5033819079399109, "learning_rate": 2.4261e-05, "loss": 0.1646, "step": 8090 }, { "epoch": 14.928901200369344, "grad_norm": 0.5540380477905273, "learning_rate": 2.4264e-05, "loss": 0.1251, "step": 8091 }, { "epoch": 14.930747922437673, "grad_norm": 0.6526817083358765, "learning_rate": 2.4267e-05, "loss": 0.0993, "step": 8092 }, { "epoch": 14.932594644506002, "grad_norm": 0.8004580140113831, "learning_rate": 
2.4270000000000003e-05, "loss": 0.1988, "step": 8093 }, { "epoch": 14.93444136657433, "grad_norm": 0.46787190437316895, "learning_rate": 2.4273000000000003e-05, "loss": 0.0718, "step": 8094 }, { "epoch": 14.936288088642659, "grad_norm": 0.6435708999633789, "learning_rate": 2.4276000000000003e-05, "loss": 0.1238, "step": 8095 }, { "epoch": 14.938134810710988, "grad_norm": 0.506477952003479, "learning_rate": 2.4279000000000003e-05, "loss": 0.0728, "step": 8096 }, { "epoch": 14.939981532779317, "grad_norm": 0.6455765962600708, "learning_rate": 2.4282000000000002e-05, "loss": 0.0505, "step": 8097 }, { "epoch": 14.941828254847646, "grad_norm": 0.7211839556694031, "learning_rate": 2.4285000000000002e-05, "loss": 0.1329, "step": 8098 }, { "epoch": 14.943674976915974, "grad_norm": 0.4941684305667877, "learning_rate": 2.4288e-05, "loss": 0.0469, "step": 8099 }, { "epoch": 14.945521698984303, "grad_norm": 0.8883525133132935, "learning_rate": 2.4291e-05, "loss": 0.0281, "step": 8100 }, { "epoch": 14.947368421052632, "grad_norm": 0.5026273131370544, "learning_rate": 2.4293999999999998e-05, "loss": 0.0234, "step": 8101 }, { "epoch": 14.949215143120961, "grad_norm": 0.3770243227481842, "learning_rate": 2.4296999999999998e-05, "loss": 0.0339, "step": 8102 }, { "epoch": 14.951061865189288, "grad_norm": 0.2530992031097412, "learning_rate": 2.43e-05, "loss": 0.0166, "step": 8103 }, { "epoch": 14.952908587257618, "grad_norm": 0.3264380395412445, "learning_rate": 2.4303e-05, "loss": 0.0198, "step": 8104 }, { "epoch": 14.954755309325947, "grad_norm": 0.6021093130111694, "learning_rate": 2.4306e-05, "loss": 0.0234, "step": 8105 }, { "epoch": 14.956602031394276, "grad_norm": 0.39788442850112915, "learning_rate": 2.4309e-05, "loss": 0.0212, "step": 8106 }, { "epoch": 14.958448753462603, "grad_norm": 0.39071908593177795, "learning_rate": 2.4312e-05, "loss": 0.019, "step": 8107 }, { "epoch": 14.960295475530932, "grad_norm": 0.3165871202945709, "learning_rate": 2.4315e-05, "loss": 0.0152, 
"step": 8108 }, { "epoch": 14.962142197599261, "grad_norm": 0.3403398096561432, "learning_rate": 2.4318e-05, "loss": 0.0207, "step": 8109 }, { "epoch": 14.96398891966759, "grad_norm": 0.5380444526672363, "learning_rate": 2.4321e-05, "loss": 0.0117, "step": 8110 }, { "epoch": 14.965835641735918, "grad_norm": 0.43799179792404175, "learning_rate": 2.4324e-05, "loss": 0.022, "step": 8111 }, { "epoch": 14.967682363804247, "grad_norm": 1.1583491563796997, "learning_rate": 2.4327e-05, "loss": 0.0156, "step": 8112 }, { "epoch": 14.969529085872576, "grad_norm": 0.3655841052532196, "learning_rate": 2.4330000000000003e-05, "loss": 0.0137, "step": 8113 }, { "epoch": 14.971375807940905, "grad_norm": 0.5397698283195496, "learning_rate": 2.4333000000000002e-05, "loss": 0.0192, "step": 8114 }, { "epoch": 14.973222530009235, "grad_norm": 0.33571380376815796, "learning_rate": 2.4336000000000002e-05, "loss": 0.0102, "step": 8115 }, { "epoch": 14.975069252077562, "grad_norm": 0.3407282829284668, "learning_rate": 2.4339000000000002e-05, "loss": 0.0122, "step": 8116 }, { "epoch": 14.976915974145891, "grad_norm": 0.6962429285049438, "learning_rate": 2.4342000000000002e-05, "loss": 0.0198, "step": 8117 }, { "epoch": 14.97876269621422, "grad_norm": 0.5999586582183838, "learning_rate": 2.4345e-05, "loss": 0.0195, "step": 8118 }, { "epoch": 14.980609418282548, "grad_norm": 0.6077821254730225, "learning_rate": 2.4348e-05, "loss": 0.027, "step": 8119 }, { "epoch": 14.982456140350877, "grad_norm": 0.6994907259941101, "learning_rate": 2.4351e-05, "loss": 0.0174, "step": 8120 }, { "epoch": 14.984302862419206, "grad_norm": 0.5564958453178406, "learning_rate": 2.4354e-05, "loss": 0.0163, "step": 8121 }, { "epoch": 14.986149584487535, "grad_norm": 0.3990434408187866, "learning_rate": 2.4357e-05, "loss": 0.0126, "step": 8122 }, { "epoch": 14.987996306555864, "grad_norm": 0.608258843421936, "learning_rate": 2.4360000000000004e-05, "loss": 0.0144, "step": 8123 }, { "epoch": 14.989843028624191, 
"grad_norm": 0.524206280708313, "learning_rate": 2.4363e-05, "loss": 0.0243, "step": 8124 }, { "epoch": 14.99168975069252, "grad_norm": 0.4362499713897705, "learning_rate": 2.4366e-05, "loss": 0.0215, "step": 8125 }, { "epoch": 14.99353647276085, "grad_norm": 0.8296584486961365, "learning_rate": 2.4369e-05, "loss": 0.0185, "step": 8126 }, { "epoch": 14.995383194829179, "grad_norm": 0.41815313696861267, "learning_rate": 2.4372e-05, "loss": 0.0166, "step": 8127 }, { "epoch": 14.997229916897506, "grad_norm": 0.3036484718322754, "learning_rate": 2.4375e-05, "loss": 0.01, "step": 8128 }, { "epoch": 14.999076638965835, "grad_norm": 0.6441636681556702, "learning_rate": 2.4378e-05, "loss": 0.0215, "step": 8129 }, { "epoch": 15.0, "grad_norm": 0.900398313999176, "learning_rate": 2.4381e-05, "loss": 0.0189, "step": 8130 }, { "epoch": 15.00184672206833, "grad_norm": 1.1221733093261719, "learning_rate": 2.4384e-05, "loss": 0.213, "step": 8131 }, { "epoch": 15.003693444136658, "grad_norm": 1.017161250114441, "learning_rate": 2.4387e-05, "loss": 0.2084, "step": 8132 }, { "epoch": 15.005540166204986, "grad_norm": 0.7330501675605774, "learning_rate": 2.439e-05, "loss": 0.1478, "step": 8133 }, { "epoch": 15.007386888273315, "grad_norm": 0.568026065826416, "learning_rate": 2.4393000000000002e-05, "loss": 0.1205, "step": 8134 }, { "epoch": 15.009233610341644, "grad_norm": 0.6616817116737366, "learning_rate": 2.4396e-05, "loss": 0.1764, "step": 8135 }, { "epoch": 15.011080332409973, "grad_norm": 0.5726966857910156, "learning_rate": 2.4399e-05, "loss": 0.0941, "step": 8136 }, { "epoch": 15.0129270544783, "grad_norm": 0.6629717350006104, "learning_rate": 2.4402e-05, "loss": 0.1011, "step": 8137 }, { "epoch": 15.01477377654663, "grad_norm": 0.5155584216117859, "learning_rate": 2.4405e-05, "loss": 0.099, "step": 8138 }, { "epoch": 15.016620498614959, "grad_norm": 0.6650702953338623, "learning_rate": 2.4408e-05, "loss": 0.0914, "step": 8139 }, { "epoch": 15.018467220683288, "grad_norm": 
0.6222261190414429, "learning_rate": 2.4411e-05, "loss": 0.0997, "step": 8140 }, { "epoch": 15.020313942751615, "grad_norm": 0.6313560009002686, "learning_rate": 2.4414e-05, "loss": 0.0588, "step": 8141 }, { "epoch": 15.022160664819944, "grad_norm": 0.46788403391838074, "learning_rate": 2.4417e-05, "loss": 0.0533, "step": 8142 }, { "epoch": 15.024007386888274, "grad_norm": 0.569367527961731, "learning_rate": 2.442e-05, "loss": 0.0355, "step": 8143 }, { "epoch": 15.025854108956603, "grad_norm": 0.5730012059211731, "learning_rate": 2.4423000000000003e-05, "loss": 0.062, "step": 8144 }, { "epoch": 15.02770083102493, "grad_norm": 0.4984351396560669, "learning_rate": 2.4426000000000003e-05, "loss": 0.0679, "step": 8145 }, { "epoch": 15.02954755309326, "grad_norm": 1.6580547094345093, "learning_rate": 2.4429000000000003e-05, "loss": 0.038, "step": 8146 }, { "epoch": 15.031394275161588, "grad_norm": 0.21034662425518036, "learning_rate": 2.4432000000000003e-05, "loss": 0.0143, "step": 8147 }, { "epoch": 15.033240997229917, "grad_norm": 0.3205229938030243, "learning_rate": 2.4435e-05, "loss": 0.0212, "step": 8148 }, { "epoch": 15.035087719298245, "grad_norm": 0.41856059432029724, "learning_rate": 2.4438e-05, "loss": 0.0437, "step": 8149 }, { "epoch": 15.036934441366574, "grad_norm": 0.4145803451538086, "learning_rate": 2.4441e-05, "loss": 0.0165, "step": 8150 }, { "epoch": 15.038781163434903, "grad_norm": 0.18975673615932465, "learning_rate": 2.4444e-05, "loss": 0.0091, "step": 8151 }, { "epoch": 15.040627885503232, "grad_norm": 0.3147350251674652, "learning_rate": 2.4446999999999998e-05, "loss": 0.0151, "step": 8152 }, { "epoch": 15.04247460757156, "grad_norm": 0.27458664774894714, "learning_rate": 2.4449999999999998e-05, "loss": 0.0121, "step": 8153 }, { "epoch": 15.044321329639889, "grad_norm": 0.35295307636260986, "learning_rate": 2.4453e-05, "loss": 0.0168, "step": 8154 }, { "epoch": 15.046168051708218, "grad_norm": 0.5425369143486023, "learning_rate": 2.4456e-05, 
"loss": 0.0208, "step": 8155 }, { "epoch": 15.048014773776547, "grad_norm": 0.4190623462200165, "learning_rate": 2.4459e-05, "loss": 0.0103, "step": 8156 }, { "epoch": 15.049861495844876, "grad_norm": 1.073426604270935, "learning_rate": 2.4462e-05, "loss": 0.0246, "step": 8157 }, { "epoch": 15.051708217913204, "grad_norm": 0.29241880774497986, "learning_rate": 2.4465e-05, "loss": 0.0177, "step": 8158 }, { "epoch": 15.053554939981533, "grad_norm": 0.3127516806125641, "learning_rate": 2.4468e-05, "loss": 0.0212, "step": 8159 }, { "epoch": 15.055401662049862, "grad_norm": 0.5754639506340027, "learning_rate": 2.4471e-05, "loss": 0.0134, "step": 8160 }, { "epoch": 15.057248384118191, "grad_norm": 0.7174379825592041, "learning_rate": 2.4474e-05, "loss": 0.0163, "step": 8161 }, { "epoch": 15.059095106186518, "grad_norm": 0.4467093348503113, "learning_rate": 2.4477e-05, "loss": 0.0129, "step": 8162 }, { "epoch": 15.060941828254848, "grad_norm": 0.3584469258785248, "learning_rate": 2.448e-05, "loss": 0.0077, "step": 8163 }, { "epoch": 15.062788550323177, "grad_norm": 0.26156914234161377, "learning_rate": 2.4483000000000003e-05, "loss": 0.0103, "step": 8164 }, { "epoch": 15.064635272391506, "grad_norm": 0.4861030578613281, "learning_rate": 2.4486000000000002e-05, "loss": 0.0195, "step": 8165 }, { "epoch": 15.066481994459833, "grad_norm": 0.31749069690704346, "learning_rate": 2.4489000000000002e-05, "loss": 0.0056, "step": 8166 }, { "epoch": 15.068328716528162, "grad_norm": 0.4234580993652344, "learning_rate": 2.4492000000000002e-05, "loss": 0.0115, "step": 8167 }, { "epoch": 15.070175438596491, "grad_norm": 0.3904325067996979, "learning_rate": 2.4495000000000002e-05, "loss": 0.0101, "step": 8168 }, { "epoch": 15.07202216066482, "grad_norm": 0.7674003839492798, "learning_rate": 2.4498e-05, "loss": 0.0173, "step": 8169 }, { "epoch": 15.073868882733148, "grad_norm": 0.21063567698001862, "learning_rate": 2.4501e-05, "loss": 0.012, "step": 8170 }, { "epoch": 15.075715604801477, 
"grad_norm": 0.3684234023094177, "learning_rate": 2.4504e-05, "loss": 0.0196, "step": 8171 }, { "epoch": 15.077562326869806, "grad_norm": 1.3102104663848877, "learning_rate": 2.4507e-05, "loss": 0.0079, "step": 8172 }, { "epoch": 15.079409048938135, "grad_norm": 0.29982709884643555, "learning_rate": 2.4509999999999997e-05, "loss": 0.013, "step": 8173 }, { "epoch": 15.081255771006463, "grad_norm": 0.40088796615600586, "learning_rate": 2.4513e-05, "loss": 0.0137, "step": 8174 }, { "epoch": 15.083102493074792, "grad_norm": 0.5310261249542236, "learning_rate": 2.4516e-05, "loss": 0.0173, "step": 8175 }, { "epoch": 15.084949215143121, "grad_norm": 0.54770427942276, "learning_rate": 2.4519e-05, "loss": 0.0104, "step": 8176 }, { "epoch": 15.08679593721145, "grad_norm": 0.6323532462120056, "learning_rate": 2.4522e-05, "loss": 0.0115, "step": 8177 }, { "epoch": 15.088642659279778, "grad_norm": 0.6644243597984314, "learning_rate": 2.4525e-05, "loss": 0.0084, "step": 8178 }, { "epoch": 15.090489381348107, "grad_norm": 0.450868159532547, "learning_rate": 2.4528e-05, "loss": 0.0122, "step": 8179 }, { "epoch": 15.092336103416436, "grad_norm": 0.47782060503959656, "learning_rate": 2.4531e-05, "loss": 0.0095, "step": 8180 }, { "epoch": 15.094182825484765, "grad_norm": 0.7100746035575867, "learning_rate": 2.4534e-05, "loss": 0.1596, "step": 8181 }, { "epoch": 15.096029547553094, "grad_norm": 0.7997457981109619, "learning_rate": 2.4537e-05, "loss": 0.2233, "step": 8182 }, { "epoch": 15.097876269621421, "grad_norm": 0.6452318429946899, "learning_rate": 2.454e-05, "loss": 0.1357, "step": 8183 }, { "epoch": 15.09972299168975, "grad_norm": 0.780385434627533, "learning_rate": 2.4543000000000002e-05, "loss": 0.1706, "step": 8184 }, { "epoch": 15.10156971375808, "grad_norm": 0.5169774889945984, "learning_rate": 2.4546000000000002e-05, "loss": 0.1229, "step": 8185 }, { "epoch": 15.103416435826409, "grad_norm": 0.630163848400116, "learning_rate": 2.4549e-05, "loss": 0.153, "step": 8186 }, { 
"epoch": 15.105263157894736, "grad_norm": 0.8105989098548889, "learning_rate": 2.4552e-05, "loss": 0.1634, "step": 8187 }, { "epoch": 15.107109879963065, "grad_norm": 0.4336480498313904, "learning_rate": 2.4555e-05, "loss": 0.0631, "step": 8188 }, { "epoch": 15.108956602031395, "grad_norm": 0.5514599680900574, "learning_rate": 2.4558e-05, "loss": 0.1063, "step": 8189 }, { "epoch": 15.110803324099724, "grad_norm": 0.37652090191841125, "learning_rate": 2.4561e-05, "loss": 0.0588, "step": 8190 }, { "epoch": 15.112650046168051, "grad_norm": 0.5351255536079407, "learning_rate": 2.4564e-05, "loss": 0.0622, "step": 8191 }, { "epoch": 15.11449676823638, "grad_norm": 0.35973745584487915, "learning_rate": 2.4567e-05, "loss": 0.041, "step": 8192 }, { "epoch": 15.11634349030471, "grad_norm": 2.9132304191589355, "learning_rate": 2.457e-05, "loss": 0.0948, "step": 8193 }, { "epoch": 15.118190212373039, "grad_norm": 0.2675500512123108, "learning_rate": 2.4573000000000003e-05, "loss": 0.027, "step": 8194 }, { "epoch": 15.120036934441366, "grad_norm": 0.38470208644866943, "learning_rate": 2.4576000000000003e-05, "loss": 0.0556, "step": 8195 }, { "epoch": 15.121883656509695, "grad_norm": 0.5402805209159851, "learning_rate": 2.4579000000000003e-05, "loss": 0.049, "step": 8196 }, { "epoch": 15.123730378578024, "grad_norm": 0.2357839196920395, "learning_rate": 2.4582000000000003e-05, "loss": 0.0499, "step": 8197 }, { "epoch": 15.125577100646353, "grad_norm": 0.40822380781173706, "learning_rate": 2.4585e-05, "loss": 0.0178, "step": 8198 }, { "epoch": 15.12742382271468, "grad_norm": 0.2533171772956848, "learning_rate": 2.4588e-05, "loss": 0.0195, "step": 8199 }, { "epoch": 15.12927054478301, "grad_norm": 0.305999755859375, "learning_rate": 2.4591e-05, "loss": 0.0238, "step": 8200 }, { "epoch": 15.131117266851339, "grad_norm": 0.470662385225296, "learning_rate": 2.4594e-05, "loss": 0.0509, "step": 8201 }, { "epoch": 15.132963988919668, "grad_norm": 0.26189985871315, "learning_rate": 
2.4597e-05, "loss": 0.0165, "step": 8202 }, { "epoch": 15.134810710987995, "grad_norm": 1.0813108682632446, "learning_rate": 2.4599999999999998e-05, "loss": 0.0425, "step": 8203 }, { "epoch": 15.136657433056325, "grad_norm": 0.33218902349472046, "learning_rate": 2.4603e-05, "loss": 0.0088, "step": 8204 }, { "epoch": 15.138504155124654, "grad_norm": 0.22756680846214294, "learning_rate": 2.4606e-05, "loss": 0.0087, "step": 8205 }, { "epoch": 15.140350877192983, "grad_norm": 0.34461233019828796, "learning_rate": 2.4609e-05, "loss": 0.0082, "step": 8206 }, { "epoch": 15.142197599261312, "grad_norm": 0.5454093217849731, "learning_rate": 2.4612e-05, "loss": 0.0142, "step": 8207 }, { "epoch": 15.14404432132964, "grad_norm": 0.30798277258872986, "learning_rate": 2.4615e-05, "loss": 0.0128, "step": 8208 }, { "epoch": 15.145891043397969, "grad_norm": 0.4576432704925537, "learning_rate": 2.4618e-05, "loss": 0.0336, "step": 8209 }, { "epoch": 15.147737765466298, "grad_norm": 0.247364342212677, "learning_rate": 2.4621e-05, "loss": 0.0112, "step": 8210 }, { "epoch": 15.149584487534627, "grad_norm": 0.614842414855957, "learning_rate": 2.4624e-05, "loss": 0.0118, "step": 8211 }, { "epoch": 15.151431209602954, "grad_norm": 0.5782668590545654, "learning_rate": 2.4627e-05, "loss": 0.0135, "step": 8212 }, { "epoch": 15.153277931671283, "grad_norm": 0.4947219491004944, "learning_rate": 2.463e-05, "loss": 0.016, "step": 8213 }, { "epoch": 15.155124653739612, "grad_norm": 0.33534643054008484, "learning_rate": 2.4633000000000003e-05, "loss": 0.012, "step": 8214 }, { "epoch": 15.156971375807942, "grad_norm": 0.2798976004123688, "learning_rate": 2.4636000000000003e-05, "loss": 0.0101, "step": 8215 }, { "epoch": 15.158818097876269, "grad_norm": 0.5524477362632751, "learning_rate": 2.4639000000000002e-05, "loss": 0.0207, "step": 8216 }, { "epoch": 15.160664819944598, "grad_norm": 0.3597121238708496, "learning_rate": 2.4642000000000002e-05, "loss": 0.0116, "step": 8217 }, { "epoch": 
15.162511542012927, "grad_norm": 0.5381467938423157, "learning_rate": 2.4645000000000002e-05, "loss": 0.0115, "step": 8218 }, { "epoch": 15.164358264081256, "grad_norm": 0.48072612285614014, "learning_rate": 2.4648000000000002e-05, "loss": 0.0178, "step": 8219 }, { "epoch": 15.166204986149584, "grad_norm": 0.9097961187362671, "learning_rate": 2.4651e-05, "loss": 0.0215, "step": 8220 }, { "epoch": 15.168051708217913, "grad_norm": 0.6664448976516724, "learning_rate": 2.4654e-05, "loss": 0.0224, "step": 8221 }, { "epoch": 15.169898430286242, "grad_norm": 0.20867213606834412, "learning_rate": 2.4656999999999998e-05, "loss": 0.0062, "step": 8222 }, { "epoch": 15.171745152354571, "grad_norm": 0.3724096715450287, "learning_rate": 2.4659999999999998e-05, "loss": 0.0171, "step": 8223 }, { "epoch": 15.173591874422899, "grad_norm": 0.5794903635978699, "learning_rate": 2.4663e-05, "loss": 0.0147, "step": 8224 }, { "epoch": 15.175438596491228, "grad_norm": 0.9206926822662354, "learning_rate": 2.4666e-05, "loss": 0.0137, "step": 8225 }, { "epoch": 15.177285318559557, "grad_norm": 0.4412376880645752, "learning_rate": 2.4669e-05, "loss": 0.014, "step": 8226 }, { "epoch": 15.179132040627886, "grad_norm": 0.5383925437927246, "learning_rate": 2.4672e-05, "loss": 0.0171, "step": 8227 }, { "epoch": 15.180978762696213, "grad_norm": 0.5624282360076904, "learning_rate": 2.4675e-05, "loss": 0.0146, "step": 8228 }, { "epoch": 15.182825484764543, "grad_norm": 0.6234703660011292, "learning_rate": 2.4678e-05, "loss": 0.0174, "step": 8229 }, { "epoch": 15.184672206832872, "grad_norm": 0.9138628840446472, "learning_rate": 2.4681e-05, "loss": 0.0164, "step": 8230 }, { "epoch": 15.1865189289012, "grad_norm": 1.2404528856277466, "learning_rate": 2.4684e-05, "loss": 0.2855, "step": 8231 }, { "epoch": 15.18836565096953, "grad_norm": 0.7322246432304382, "learning_rate": 2.4687e-05, "loss": 0.1642, "step": 8232 }, { "epoch": 15.190212373037857, "grad_norm": 0.6022248864173889, "learning_rate": 
2.469e-05, "loss": 0.1234, "step": 8233 }, { "epoch": 15.192059095106186, "grad_norm": 0.5909581780433655, "learning_rate": 2.4693000000000002e-05, "loss": 0.1583, "step": 8234 }, { "epoch": 15.193905817174516, "grad_norm": 0.560417652130127, "learning_rate": 2.4696000000000002e-05, "loss": 0.1112, "step": 8235 }, { "epoch": 15.195752539242845, "grad_norm": 0.5888610482215881, "learning_rate": 2.4699000000000002e-05, "loss": 0.105, "step": 8236 }, { "epoch": 15.197599261311172, "grad_norm": 0.945635199546814, "learning_rate": 2.4702e-05, "loss": 0.107, "step": 8237 }, { "epoch": 15.199445983379501, "grad_norm": 0.6924152374267578, "learning_rate": 2.4705e-05, "loss": 0.0957, "step": 8238 }, { "epoch": 15.20129270544783, "grad_norm": 0.4013981521129608, "learning_rate": 2.4708e-05, "loss": 0.0823, "step": 8239 }, { "epoch": 15.20313942751616, "grad_norm": 0.5373607277870178, "learning_rate": 2.4711e-05, "loss": 0.0763, "step": 8240 }, { "epoch": 15.204986149584487, "grad_norm": 0.7776703238487244, "learning_rate": 2.4714e-05, "loss": 0.087, "step": 8241 }, { "epoch": 15.206832871652816, "grad_norm": 0.4090069532394409, "learning_rate": 2.4717e-05, "loss": 0.0799, "step": 8242 }, { "epoch": 15.208679593721145, "grad_norm": 0.43735113739967346, "learning_rate": 2.472e-05, "loss": 0.0332, "step": 8243 }, { "epoch": 15.210526315789474, "grad_norm": 0.3009633421897888, "learning_rate": 2.4723000000000004e-05, "loss": 0.0261, "step": 8244 }, { "epoch": 15.212373037857802, "grad_norm": 0.42820805311203003, "learning_rate": 2.4726000000000003e-05, "loss": 0.0317, "step": 8245 }, { "epoch": 15.21421975992613, "grad_norm": 0.4590342044830322, "learning_rate": 2.4729000000000003e-05, "loss": 0.0215, "step": 8246 }, { "epoch": 15.21606648199446, "grad_norm": 0.5674740076065063, "learning_rate": 2.4732e-05, "loss": 0.0199, "step": 8247 }, { "epoch": 15.21791320406279, "grad_norm": 0.677531898021698, "learning_rate": 2.4735e-05, "loss": 0.0201, "step": 8248 }, { "epoch": 
15.219759926131117, "grad_norm": 0.3279813826084137, "learning_rate": 2.4738e-05, "loss": 0.0202, "step": 8249 }, { "epoch": 15.221606648199446, "grad_norm": 0.3743285536766052, "learning_rate": 2.4741e-05, "loss": 0.0164, "step": 8250 }, { "epoch": 15.223453370267775, "grad_norm": 0.5781236290931702, "learning_rate": 2.4744e-05, "loss": 0.0164, "step": 8251 }, { "epoch": 15.225300092336104, "grad_norm": 0.47307130694389343, "learning_rate": 2.4747e-05, "loss": 0.0157, "step": 8252 }, { "epoch": 15.227146814404431, "grad_norm": 0.3261812925338745, "learning_rate": 2.475e-05, "loss": 0.0109, "step": 8253 }, { "epoch": 15.22899353647276, "grad_norm": 0.6554425954818726, "learning_rate": 2.4753e-05, "loss": 0.0216, "step": 8254 }, { "epoch": 15.23084025854109, "grad_norm": 0.3593559265136719, "learning_rate": 2.4756e-05, "loss": 0.0221, "step": 8255 }, { "epoch": 15.232686980609419, "grad_norm": 0.3336569368839264, "learning_rate": 2.4759e-05, "loss": 0.0117, "step": 8256 }, { "epoch": 15.234533702677748, "grad_norm": 1.3743199110031128, "learning_rate": 2.4762e-05, "loss": 0.0199, "step": 8257 }, { "epoch": 15.236380424746075, "grad_norm": 0.25178849697113037, "learning_rate": 2.4765e-05, "loss": 0.0083, "step": 8258 }, { "epoch": 15.238227146814404, "grad_norm": 0.37484949827194214, "learning_rate": 2.4768e-05, "loss": 0.0118, "step": 8259 }, { "epoch": 15.240073868882734, "grad_norm": 0.46891501545906067, "learning_rate": 2.4771e-05, "loss": 0.0663, "step": 8260 }, { "epoch": 15.241920590951063, "grad_norm": 0.4010298252105713, "learning_rate": 2.4774e-05, "loss": 0.0222, "step": 8261 }, { "epoch": 15.24376731301939, "grad_norm": 0.4214724600315094, "learning_rate": 2.4777e-05, "loss": 0.0121, "step": 8262 }, { "epoch": 15.24561403508772, "grad_norm": 0.29846876859664917, "learning_rate": 2.478e-05, "loss": 0.0178, "step": 8263 }, { "epoch": 15.247460757156048, "grad_norm": 0.2897872030735016, "learning_rate": 2.4783e-05, "loss": 0.0109, "step": 8264 }, { "epoch": 
15.249307479224377, "grad_norm": 0.5751253366470337, "learning_rate": 2.4786000000000003e-05, "loss": 0.0195, "step": 8265 }, { "epoch": 15.251154201292705, "grad_norm": 0.3264547288417816, "learning_rate": 2.4789000000000003e-05, "loss": 0.0142, "step": 8266 }, { "epoch": 15.253000923361034, "grad_norm": 0.5448217988014221, "learning_rate": 2.4792000000000003e-05, "loss": 0.0166, "step": 8267 }, { "epoch": 15.254847645429363, "grad_norm": 0.4921300411224365, "learning_rate": 2.4795000000000002e-05, "loss": 0.0176, "step": 8268 }, { "epoch": 15.256694367497692, "grad_norm": 0.522863507270813, "learning_rate": 2.4798000000000002e-05, "loss": 0.0248, "step": 8269 }, { "epoch": 15.25854108956602, "grad_norm": 0.6211817264556885, "learning_rate": 2.4801000000000002e-05, "loss": 0.0174, "step": 8270 }, { "epoch": 15.260387811634349, "grad_norm": 0.48236003518104553, "learning_rate": 2.4804e-05, "loss": 0.0082, "step": 8271 }, { "epoch": 15.262234533702678, "grad_norm": 0.27671054005622864, "learning_rate": 2.4806999999999998e-05, "loss": 0.0151, "step": 8272 }, { "epoch": 15.264081255771007, "grad_norm": 0.6454673409461975, "learning_rate": 2.4809999999999998e-05, "loss": 0.0204, "step": 8273 }, { "epoch": 15.265927977839334, "grad_norm": 0.5842491388320923, "learning_rate": 2.4812999999999998e-05, "loss": 0.0149, "step": 8274 }, { "epoch": 15.267774699907664, "grad_norm": 0.42065128684043884, "learning_rate": 2.4816e-05, "loss": 0.0142, "step": 8275 }, { "epoch": 15.269621421975993, "grad_norm": 0.5586716532707214, "learning_rate": 2.4819e-05, "loss": 0.0245, "step": 8276 }, { "epoch": 15.271468144044322, "grad_norm": 0.6155346632003784, "learning_rate": 2.4822e-05, "loss": 0.0293, "step": 8277 }, { "epoch": 15.27331486611265, "grad_norm": 0.33992999792099, "learning_rate": 2.4825e-05, "loss": 0.0101, "step": 8278 }, { "epoch": 15.275161588180978, "grad_norm": 0.25734278559684753, "learning_rate": 2.4828e-05, "loss": 0.0123, "step": 8279 }, { "epoch": 
15.277008310249307, "grad_norm": 2.350486993789673, "learning_rate": 2.4831e-05, "loss": 0.0196, "step": 8280 }, { "epoch": 15.278855032317637, "grad_norm": 0.7939901947975159, "learning_rate": 2.4834e-05, "loss": 0.1937, "step": 8281 }, { "epoch": 15.280701754385966, "grad_norm": 1.1575547456741333, "learning_rate": 2.4837e-05, "loss": 0.2262, "step": 8282 }, { "epoch": 15.282548476454293, "grad_norm": 0.9486008882522583, "learning_rate": 2.484e-05, "loss": 0.2102, "step": 8283 }, { "epoch": 15.284395198522622, "grad_norm": 0.4723045229911804, "learning_rate": 2.4843e-05, "loss": 0.1015, "step": 8284 }, { "epoch": 15.286241920590951, "grad_norm": 0.8471699953079224, "learning_rate": 2.4846000000000002e-05, "loss": 0.1094, "step": 8285 }, { "epoch": 15.28808864265928, "grad_norm": 0.7257476449012756, "learning_rate": 2.4849000000000002e-05, "loss": 0.0808, "step": 8286 }, { "epoch": 15.289935364727608, "grad_norm": 0.6892672777175903, "learning_rate": 2.4852000000000002e-05, "loss": 0.0707, "step": 8287 }, { "epoch": 15.291782086795937, "grad_norm": 0.5333134531974792, "learning_rate": 2.4855000000000002e-05, "loss": 0.0887, "step": 8288 }, { "epoch": 15.293628808864266, "grad_norm": 0.4352606534957886, "learning_rate": 2.4858e-05, "loss": 0.0717, "step": 8289 }, { "epoch": 15.295475530932595, "grad_norm": 0.5483019948005676, "learning_rate": 2.4861e-05, "loss": 0.0879, "step": 8290 }, { "epoch": 15.297322253000923, "grad_norm": 0.37380048632621765, "learning_rate": 2.4864e-05, "loss": 0.043, "step": 8291 }, { "epoch": 15.299168975069252, "grad_norm": 0.4453401565551758, "learning_rate": 2.4867e-05, "loss": 0.0515, "step": 8292 }, { "epoch": 15.301015697137581, "grad_norm": 0.45170173048973083, "learning_rate": 2.487e-05, "loss": 0.0563, "step": 8293 }, { "epoch": 15.30286241920591, "grad_norm": 0.4668281674385071, "learning_rate": 2.4873e-05, "loss": 0.0672, "step": 8294 }, { "epoch": 15.304709141274238, "grad_norm": 0.3134375810623169, "learning_rate": 
2.4876000000000004e-05, "loss": 0.0483, "step": 8295 }, { "epoch": 15.306555863342567, "grad_norm": 0.4253406524658203, "learning_rate": 2.4879e-05, "loss": 0.0176, "step": 8296 }, { "epoch": 15.308402585410896, "grad_norm": 0.6559887528419495, "learning_rate": 2.4882e-05, "loss": 0.0227, "step": 8297 }, { "epoch": 15.310249307479225, "grad_norm": 0.4013559818267822, "learning_rate": 2.4885e-05, "loss": 0.0298, "step": 8298 }, { "epoch": 15.312096029547552, "grad_norm": 0.5104976296424866, "learning_rate": 2.4888e-05, "loss": 0.0187, "step": 8299 }, { "epoch": 15.313942751615881, "grad_norm": 0.3234640657901764, "learning_rate": 2.4891e-05, "loss": 0.0326, "step": 8300 }, { "epoch": 15.31578947368421, "grad_norm": 0.3353601396083832, "learning_rate": 2.4894e-05, "loss": 0.0245, "step": 8301 }, { "epoch": 15.31763619575254, "grad_norm": 0.4961949288845062, "learning_rate": 2.4897e-05, "loss": 0.0093, "step": 8302 }, { "epoch": 15.319482917820867, "grad_norm": 0.5354464054107666, "learning_rate": 2.49e-05, "loss": 0.0193, "step": 8303 }, { "epoch": 15.321329639889196, "grad_norm": 0.53691166639328, "learning_rate": 2.4903e-05, "loss": 0.0211, "step": 8304 }, { "epoch": 15.323176361957525, "grad_norm": 0.22957701981067657, "learning_rate": 2.4906000000000002e-05, "loss": 0.0073, "step": 8305 }, { "epoch": 15.325023084025855, "grad_norm": 0.2727436125278473, "learning_rate": 2.4909e-05, "loss": 0.0119, "step": 8306 }, { "epoch": 15.326869806094184, "grad_norm": 0.25763967633247375, "learning_rate": 2.4912e-05, "loss": 0.0152, "step": 8307 }, { "epoch": 15.328716528162511, "grad_norm": 0.36578795313835144, "learning_rate": 2.4915e-05, "loss": 0.0519, "step": 8308 }, { "epoch": 15.33056325023084, "grad_norm": 0.45741453766822815, "learning_rate": 2.4918e-05, "loss": 0.0198, "step": 8309 }, { "epoch": 15.33240997229917, "grad_norm": 0.2624121606349945, "learning_rate": 2.4921e-05, "loss": 0.0141, "step": 8310 }, { "epoch": 15.334256694367498, "grad_norm": 
3.8751049041748047, "learning_rate": 2.4924e-05, "loss": 0.0151, "step": 8311 }, { "epoch": 15.336103416435826, "grad_norm": 0.37224772572517395, "learning_rate": 2.4927e-05, "loss": 0.0326, "step": 8312 }, { "epoch": 15.337950138504155, "grad_norm": 1.0172438621520996, "learning_rate": 2.493e-05, "loss": 0.0277, "step": 8313 }, { "epoch": 15.339796860572484, "grad_norm": 5.484036922454834, "learning_rate": 2.4933e-05, "loss": 0.029, "step": 8314 }, { "epoch": 15.341643582640813, "grad_norm": 0.3800578713417053, "learning_rate": 2.4936000000000003e-05, "loss": 0.009, "step": 8315 }, { "epoch": 15.34349030470914, "grad_norm": 0.3240329921245575, "learning_rate": 2.4939000000000003e-05, "loss": 0.0097, "step": 8316 }, { "epoch": 15.34533702677747, "grad_norm": 0.6319631338119507, "learning_rate": 2.4942000000000003e-05, "loss": 0.0205, "step": 8317 }, { "epoch": 15.347183748845799, "grad_norm": 0.4132469892501831, "learning_rate": 2.4945000000000003e-05, "loss": 0.0226, "step": 8318 }, { "epoch": 15.349030470914128, "grad_norm": 0.41254034638404846, "learning_rate": 2.4948000000000002e-05, "loss": 0.0103, "step": 8319 }, { "epoch": 15.350877192982455, "grad_norm": 0.705245852470398, "learning_rate": 2.4951e-05, "loss": 0.0195, "step": 8320 }, { "epoch": 15.352723915050785, "grad_norm": 0.988831102848053, "learning_rate": 2.4954e-05, "loss": 0.0222, "step": 8321 }, { "epoch": 15.354570637119114, "grad_norm": 0.5189151167869568, "learning_rate": 2.4957e-05, "loss": 0.0228, "step": 8322 }, { "epoch": 15.356417359187443, "grad_norm": 0.41819465160369873, "learning_rate": 2.4959999999999998e-05, "loss": 0.0218, "step": 8323 }, { "epoch": 15.35826408125577, "grad_norm": 0.2617114186286926, "learning_rate": 2.4962999999999998e-05, "loss": 0.0126, "step": 8324 }, { "epoch": 15.3601108033241, "grad_norm": 0.5680693984031677, "learning_rate": 2.4966e-05, "loss": 0.0208, "step": 8325 }, { "epoch": 15.361957525392429, "grad_norm": 3.6656782627105713, "learning_rate": 2.4969e-05, 
"loss": 0.0187, "step": 8326 }, { "epoch": 15.363804247460758, "grad_norm": 0.5825662612915039, "learning_rate": 2.4972e-05, "loss": 0.0178, "step": 8327 }, { "epoch": 15.365650969529085, "grad_norm": 0.4422478973865509, "learning_rate": 2.4975e-05, "loss": 0.0168, "step": 8328 }, { "epoch": 15.367497691597414, "grad_norm": 0.7485823035240173, "learning_rate": 2.4978e-05, "loss": 0.0232, "step": 8329 }, { "epoch": 15.369344413665743, "grad_norm": 0.7661342620849609, "learning_rate": 2.4981e-05, "loss": 0.0386, "step": 8330 }, { "epoch": 15.371191135734072, "grad_norm": 0.7451789975166321, "learning_rate": 2.4984e-05, "loss": 0.1895, "step": 8331 }, { "epoch": 15.373037857802402, "grad_norm": 0.7402056455612183, "learning_rate": 2.4987e-05, "loss": 0.1275, "step": 8332 }, { "epoch": 15.374884579870729, "grad_norm": 0.7276471257209778, "learning_rate": 2.499e-05, "loss": 0.2129, "step": 8333 }, { "epoch": 15.376731301939058, "grad_norm": 0.6210782527923584, "learning_rate": 2.4993e-05, "loss": 0.1199, "step": 8334 }, { "epoch": 15.378578024007387, "grad_norm": 0.7629683017730713, "learning_rate": 2.4996000000000003e-05, "loss": 0.1289, "step": 8335 }, { "epoch": 15.380424746075716, "grad_norm": 0.5822878479957581, "learning_rate": 2.4999000000000002e-05, "loss": 0.1173, "step": 8336 }, { "epoch": 15.382271468144044, "grad_norm": 0.551585853099823, "learning_rate": 2.5002000000000002e-05, "loss": 0.0899, "step": 8337 }, { "epoch": 15.384118190212373, "grad_norm": 0.48120298981666565, "learning_rate": 2.5005000000000002e-05, "loss": 0.0821, "step": 8338 }, { "epoch": 15.385964912280702, "grad_norm": 0.6678263545036316, "learning_rate": 2.5008000000000002e-05, "loss": 0.0939, "step": 8339 }, { "epoch": 15.387811634349031, "grad_norm": 0.4962681233882904, "learning_rate": 2.5011e-05, "loss": 0.068, "step": 8340 }, { "epoch": 15.389658356417359, "grad_norm": 0.4213404059410095, "learning_rate": 2.5014e-05, "loss": 0.0482, "step": 8341 }, { "epoch": 15.391505078485688, 
"grad_norm": 0.5263739228248596, "learning_rate": 2.5017e-05, "loss": 0.066, "step": 8342 }, { "epoch": 15.393351800554017, "grad_norm": 0.45648154616355896, "learning_rate": 2.502e-05, "loss": 0.0465, "step": 8343 }, { "epoch": 15.395198522622346, "grad_norm": 0.33768972754478455, "learning_rate": 2.5023e-05, "loss": 0.0305, "step": 8344 }, { "epoch": 15.397045244690673, "grad_norm": 0.42431968450546265, "learning_rate": 2.5026e-05, "loss": 0.0504, "step": 8345 }, { "epoch": 15.398891966759003, "grad_norm": 0.25444427132606506, "learning_rate": 2.5029e-05, "loss": 0.0166, "step": 8346 }, { "epoch": 15.400738688827332, "grad_norm": 0.41962170600891113, "learning_rate": 2.5032e-05, "loss": 0.0172, "step": 8347 }, { "epoch": 15.40258541089566, "grad_norm": 0.7231307029724121, "learning_rate": 2.5035e-05, "loss": 0.0227, "step": 8348 }, { "epoch": 15.404432132963988, "grad_norm": 0.4448593556880951, "learning_rate": 2.5038e-05, "loss": 0.0197, "step": 8349 }, { "epoch": 15.406278855032317, "grad_norm": 3.519976854324341, "learning_rate": 2.5041e-05, "loss": 0.0768, "step": 8350 }, { "epoch": 15.408125577100646, "grad_norm": 0.3873467743396759, "learning_rate": 2.5044e-05, "loss": 0.0219, "step": 8351 }, { "epoch": 15.409972299168976, "grad_norm": 0.2840051054954529, "learning_rate": 2.5047e-05, "loss": 0.0239, "step": 8352 }, { "epoch": 15.411819021237303, "grad_norm": 0.43799832463264465, "learning_rate": 2.505e-05, "loss": 0.0164, "step": 8353 }, { "epoch": 15.413665743305632, "grad_norm": 0.4558172821998596, "learning_rate": 2.5053e-05, "loss": 0.0175, "step": 8354 }, { "epoch": 15.415512465373961, "grad_norm": 0.2507631778717041, "learning_rate": 2.5056000000000002e-05, "loss": 0.0132, "step": 8355 }, { "epoch": 15.41735918744229, "grad_norm": 0.2601400315761566, "learning_rate": 2.5059000000000002e-05, "loss": 0.0132, "step": 8356 }, { "epoch": 15.41920590951062, "grad_norm": 0.3279007077217102, "learning_rate": 2.5062e-05, "loss": 0.0118, "step": 8357 }, { 
"epoch": 15.421052631578947, "grad_norm": 0.36395788192749023, "learning_rate": 2.5065e-05, "loss": 0.0134, "step": 8358 }, { "epoch": 15.422899353647276, "grad_norm": 0.6187348365783691, "learning_rate": 2.5068e-05, "loss": 0.0233, "step": 8359 }, { "epoch": 15.424746075715605, "grad_norm": 0.6799266338348389, "learning_rate": 2.5071e-05, "loss": 0.0133, "step": 8360 }, { "epoch": 15.426592797783934, "grad_norm": 0.9328656196594238, "learning_rate": 2.5074e-05, "loss": 0.0264, "step": 8361 }, { "epoch": 15.428439519852262, "grad_norm": 0.4898659288883209, "learning_rate": 2.5077e-05, "loss": 0.0151, "step": 8362 }, { "epoch": 15.43028624192059, "grad_norm": 0.6099836230278015, "learning_rate": 2.508e-05, "loss": 0.0189, "step": 8363 }, { "epoch": 15.43213296398892, "grad_norm": 0.23199789226055145, "learning_rate": 2.5083e-05, "loss": 0.0081, "step": 8364 }, { "epoch": 15.43397968605725, "grad_norm": 0.24271805584430695, "learning_rate": 2.5086000000000003e-05, "loss": 0.0095, "step": 8365 }, { "epoch": 15.435826408125576, "grad_norm": 0.37805840373039246, "learning_rate": 2.5089000000000003e-05, "loss": 0.0161, "step": 8366 }, { "epoch": 15.437673130193906, "grad_norm": 0.49491918087005615, "learning_rate": 2.5092000000000003e-05, "loss": 0.0079, "step": 8367 }, { "epoch": 15.439519852262235, "grad_norm": 0.7112870216369629, "learning_rate": 2.5095000000000003e-05, "loss": 0.0227, "step": 8368 }, { "epoch": 15.441366574330564, "grad_norm": 0.4112222492694855, "learning_rate": 2.5098000000000003e-05, "loss": 0.01, "step": 8369 }, { "epoch": 15.443213296398891, "grad_norm": 0.6659647226333618, "learning_rate": 2.5101e-05, "loss": 0.0222, "step": 8370 }, { "epoch": 15.44506001846722, "grad_norm": 0.4516090452671051, "learning_rate": 2.5104e-05, "loss": 0.0133, "step": 8371 }, { "epoch": 15.44690674053555, "grad_norm": 0.7286649942398071, "learning_rate": 2.5107e-05, "loss": 0.018, "step": 8372 }, { "epoch": 15.448753462603879, "grad_norm": 0.6150336265563965, 
"learning_rate": 2.511e-05, "loss": 0.0234, "step": 8373 }, { "epoch": 15.450600184672206, "grad_norm": 0.266416072845459, "learning_rate": 2.5112999999999998e-05, "loss": 0.0122, "step": 8374 }, { "epoch": 15.452446906740535, "grad_norm": 0.26499465107917786, "learning_rate": 2.5116e-05, "loss": 0.0112, "step": 8375 }, { "epoch": 15.454293628808864, "grad_norm": 0.5799499154090881, "learning_rate": 2.5119e-05, "loss": 0.0175, "step": 8376 }, { "epoch": 15.456140350877194, "grad_norm": 0.625869631767273, "learning_rate": 2.5122e-05, "loss": 0.0204, "step": 8377 }, { "epoch": 15.45798707294552, "grad_norm": 0.6527268886566162, "learning_rate": 2.5125e-05, "loss": 0.039, "step": 8378 }, { "epoch": 15.45983379501385, "grad_norm": 1.0619781017303467, "learning_rate": 2.5128e-05, "loss": 0.0231, "step": 8379 }, { "epoch": 15.46168051708218, "grad_norm": 0.5146917700767517, "learning_rate": 2.5131e-05, "loss": 0.0252, "step": 8380 }, { "epoch": 15.463527239150508, "grad_norm": 0.8048033118247986, "learning_rate": 2.5134e-05, "loss": 0.2758, "step": 8381 }, { "epoch": 15.465373961218837, "grad_norm": 0.56715989112854, "learning_rate": 2.5137e-05, "loss": 0.1736, "step": 8382 }, { "epoch": 15.467220683287165, "grad_norm": 1.2161662578582764, "learning_rate": 2.514e-05, "loss": 0.1227, "step": 8383 }, { "epoch": 15.469067405355494, "grad_norm": 0.6002994775772095, "learning_rate": 2.5143e-05, "loss": 0.1131, "step": 8384 }, { "epoch": 15.470914127423823, "grad_norm": 0.6885210871696472, "learning_rate": 2.5146e-05, "loss": 0.1846, "step": 8385 }, { "epoch": 15.472760849492152, "grad_norm": 0.5307503342628479, "learning_rate": 2.5149000000000003e-05, "loss": 0.0912, "step": 8386 }, { "epoch": 15.47460757156048, "grad_norm": 0.35235410928726196, "learning_rate": 2.5152000000000002e-05, "loss": 0.0671, "step": 8387 }, { "epoch": 15.476454293628809, "grad_norm": 0.6050505638122559, "learning_rate": 2.5155000000000002e-05, "loss": 0.1021, "step": 8388 }, { "epoch": 
15.478301015697138, "grad_norm": 0.526045024394989, "learning_rate": 2.5158000000000002e-05, "loss": 0.0765, "step": 8389 }, { "epoch": 15.480147737765467, "grad_norm": 0.36944779753685, "learning_rate": 2.5161000000000002e-05, "loss": 0.0586, "step": 8390 }, { "epoch": 15.481994459833794, "grad_norm": 0.41731560230255127, "learning_rate": 2.5164e-05, "loss": 0.052, "step": 8391 }, { "epoch": 15.483841181902124, "grad_norm": 0.8687494397163391, "learning_rate": 2.5167e-05, "loss": 0.0673, "step": 8392 }, { "epoch": 15.485687903970453, "grad_norm": 0.6858538389205933, "learning_rate": 2.517e-05, "loss": 0.0602, "step": 8393 }, { "epoch": 15.487534626038782, "grad_norm": 1.1235566139221191, "learning_rate": 2.5172999999999998e-05, "loss": 0.0459, "step": 8394 }, { "epoch": 15.48938134810711, "grad_norm": 0.41303691267967224, "learning_rate": 2.5175999999999997e-05, "loss": 0.0571, "step": 8395 }, { "epoch": 15.491228070175438, "grad_norm": 0.5385510325431824, "learning_rate": 2.5179e-05, "loss": 0.0399, "step": 8396 }, { "epoch": 15.493074792243767, "grad_norm": 0.3182043731212616, "learning_rate": 2.5182e-05, "loss": 0.0363, "step": 8397 }, { "epoch": 15.494921514312097, "grad_norm": 0.24598966538906097, "learning_rate": 2.5185e-05, "loss": 0.0111, "step": 8398 }, { "epoch": 15.496768236380424, "grad_norm": 0.40207529067993164, "learning_rate": 2.5188e-05, "loss": 0.0207, "step": 8399 }, { "epoch": 15.498614958448753, "grad_norm": 0.5250248312950134, "learning_rate": 2.5191e-05, "loss": 0.0587, "step": 8400 }, { "epoch": 15.500461680517082, "grad_norm": 0.33213678002357483, "learning_rate": 2.5194e-05, "loss": 0.0199, "step": 8401 }, { "epoch": 15.502308402585411, "grad_norm": 0.17228974401950836, "learning_rate": 2.5197e-05, "loss": 0.0081, "step": 8402 }, { "epoch": 15.504155124653739, "grad_norm": 0.3369840383529663, "learning_rate": 2.52e-05, "loss": 0.0109, "step": 8403 }, { "epoch": 15.506001846722068, "grad_norm": 0.31128376722335815, "learning_rate": 
2.5203e-05, "loss": 0.0113, "step": 8404 }, { "epoch": 15.507848568790397, "grad_norm": 0.36564984917640686, "learning_rate": 2.5206e-05, "loss": 0.0221, "step": 8405 }, { "epoch": 15.509695290858726, "grad_norm": 0.6158779859542847, "learning_rate": 2.5209000000000002e-05, "loss": 0.0188, "step": 8406 }, { "epoch": 15.511542012927055, "grad_norm": 0.4490515887737274, "learning_rate": 2.5212000000000002e-05, "loss": 0.0254, "step": 8407 }, { "epoch": 15.513388734995383, "grad_norm": 0.4023129642009735, "learning_rate": 2.5215e-05, "loss": 0.0178, "step": 8408 }, { "epoch": 15.515235457063712, "grad_norm": 0.7977317571640015, "learning_rate": 2.5218e-05, "loss": 0.0323, "step": 8409 }, { "epoch": 15.517082179132041, "grad_norm": 0.8370609283447266, "learning_rate": 2.5221e-05, "loss": 0.016, "step": 8410 }, { "epoch": 15.51892890120037, "grad_norm": 0.512177586555481, "learning_rate": 2.5224e-05, "loss": 0.0407, "step": 8411 }, { "epoch": 15.520775623268698, "grad_norm": 0.4275936186313629, "learning_rate": 2.5227e-05, "loss": 0.0131, "step": 8412 }, { "epoch": 15.522622345337027, "grad_norm": 0.22574132680892944, "learning_rate": 2.523e-05, "loss": 0.0088, "step": 8413 }, { "epoch": 15.524469067405356, "grad_norm": 0.2435065507888794, "learning_rate": 2.5233e-05, "loss": 0.0085, "step": 8414 }, { "epoch": 15.526315789473685, "grad_norm": 0.6562572717666626, "learning_rate": 2.5236e-05, "loss": 0.0204, "step": 8415 }, { "epoch": 15.528162511542012, "grad_norm": 0.3493426442146301, "learning_rate": 2.5239000000000003e-05, "loss": 0.006, "step": 8416 }, { "epoch": 15.530009233610341, "grad_norm": 0.3053175210952759, "learning_rate": 2.5242000000000003e-05, "loss": 0.0384, "step": 8417 }, { "epoch": 15.53185595567867, "grad_norm": 0.4377683699131012, "learning_rate": 2.5245000000000003e-05, "loss": 0.0208, "step": 8418 }, { "epoch": 15.533702677747, "grad_norm": 0.35627028346061707, "learning_rate": 2.5248e-05, "loss": 0.0177, "step": 8419 }, { "epoch": 
15.535549399815327, "grad_norm": 0.33622390031814575, "learning_rate": 2.5251e-05, "loss": 0.0101, "step": 8420 }, { "epoch": 15.537396121883656, "grad_norm": 0.40122103691101074, "learning_rate": 2.5254e-05, "loss": 0.0137, "step": 8421 }, { "epoch": 15.539242843951985, "grad_norm": 0.44825655221939087, "learning_rate": 2.5257e-05, "loss": 0.0162, "step": 8422 }, { "epoch": 15.541089566020315, "grad_norm": 0.32165566086769104, "learning_rate": 2.526e-05, "loss": 0.0148, "step": 8423 }, { "epoch": 15.542936288088642, "grad_norm": 0.43938520550727844, "learning_rate": 2.5263e-05, "loss": 0.0132, "step": 8424 }, { "epoch": 15.544783010156971, "grad_norm": 0.36822405457496643, "learning_rate": 2.5266e-05, "loss": 0.0166, "step": 8425 }, { "epoch": 15.5466297322253, "grad_norm": 0.7597044706344604, "learning_rate": 2.5269e-05, "loss": 0.0244, "step": 8426 }, { "epoch": 15.54847645429363, "grad_norm": 0.6303825378417969, "learning_rate": 2.5272e-05, "loss": 0.0175, "step": 8427 }, { "epoch": 15.550323176361957, "grad_norm": 0.38170045614242554, "learning_rate": 2.5275e-05, "loss": 0.0105, "step": 8428 }, { "epoch": 15.552169898430286, "grad_norm": 0.5360559225082397, "learning_rate": 2.5278e-05, "loss": 0.0149, "step": 8429 }, { "epoch": 15.554016620498615, "grad_norm": 0.6139945983886719, "learning_rate": 2.5281e-05, "loss": 0.0233, "step": 8430 }, { "epoch": 15.555863342566944, "grad_norm": 0.8562393188476562, "learning_rate": 2.5284e-05, "loss": 0.2325, "step": 8431 }, { "epoch": 15.557710064635273, "grad_norm": 1.1304420232772827, "learning_rate": 2.5287e-05, "loss": 0.1774, "step": 8432 }, { "epoch": 15.5595567867036, "grad_norm": 1.1057580709457397, "learning_rate": 2.529e-05, "loss": 0.166, "step": 8433 }, { "epoch": 15.56140350877193, "grad_norm": 0.7534903287887573, "learning_rate": 2.5293e-05, "loss": 0.1355, "step": 8434 }, { "epoch": 15.563250230840259, "grad_norm": 0.6858900785446167, "learning_rate": 2.5296e-05, "loss": 0.1177, "step": 8435 }, { "epoch": 
15.565096952908588, "grad_norm": 0.4988061785697937, "learning_rate": 2.5299000000000003e-05, "loss": 0.098, "step": 8436 }, { "epoch": 15.566943674976915, "grad_norm": 0.6958936452865601, "learning_rate": 2.5302000000000003e-05, "loss": 0.1319, "step": 8437 }, { "epoch": 15.568790397045245, "grad_norm": 0.7454564571380615, "learning_rate": 2.5305000000000003e-05, "loss": 0.1212, "step": 8438 }, { "epoch": 15.570637119113574, "grad_norm": 0.7997215390205383, "learning_rate": 2.5308000000000002e-05, "loss": 0.0711, "step": 8439 }, { "epoch": 15.572483841181903, "grad_norm": 0.5632338523864746, "learning_rate": 2.5311000000000002e-05, "loss": 0.113, "step": 8440 }, { "epoch": 15.57433056325023, "grad_norm": 0.759938657283783, "learning_rate": 2.5314000000000002e-05, "loss": 0.0797, "step": 8441 }, { "epoch": 15.57617728531856, "grad_norm": 1.036684513092041, "learning_rate": 2.5317000000000002e-05, "loss": 0.1144, "step": 8442 }, { "epoch": 15.578024007386889, "grad_norm": 0.31157785654067993, "learning_rate": 2.5319999999999998e-05, "loss": 0.0285, "step": 8443 }, { "epoch": 15.579870729455218, "grad_norm": 0.3606410324573517, "learning_rate": 2.5322999999999998e-05, "loss": 0.0345, "step": 8444 }, { "epoch": 15.581717451523545, "grad_norm": 0.3788275420665741, "learning_rate": 2.5325999999999998e-05, "loss": 0.026, "step": 8445 }, { "epoch": 15.583564173591874, "grad_norm": 0.30666303634643555, "learning_rate": 2.5329e-05, "loss": 0.0198, "step": 8446 }, { "epoch": 15.585410895660203, "grad_norm": 0.7083009481430054, "learning_rate": 2.5332e-05, "loss": 0.0227, "step": 8447 }, { "epoch": 15.587257617728532, "grad_norm": 0.21037326753139496, "learning_rate": 2.5335e-05, "loss": 0.014, "step": 8448 }, { "epoch": 15.58910433979686, "grad_norm": 0.3480874300003052, "learning_rate": 2.5338e-05, "loss": 0.0089, "step": 8449 }, { "epoch": 15.590951061865189, "grad_norm": 0.843414306640625, "learning_rate": 2.5341e-05, "loss": 0.0191, "step": 8450 }, { "epoch": 
15.592797783933518, "grad_norm": 0.8120933175086975, "learning_rate": 2.5344e-05, "loss": 0.0163, "step": 8451 }, { "epoch": 15.594644506001847, "grad_norm": 0.38233235478401184, "learning_rate": 2.5347e-05, "loss": 0.0201, "step": 8452 }, { "epoch": 15.596491228070175, "grad_norm": 0.289878785610199, "learning_rate": 2.535e-05, "loss": 0.0105, "step": 8453 }, { "epoch": 15.598337950138504, "grad_norm": 0.35070326924324036, "learning_rate": 2.5353e-05, "loss": 0.0175, "step": 8454 }, { "epoch": 15.600184672206833, "grad_norm": 0.40063390135765076, "learning_rate": 2.5356e-05, "loss": 0.0099, "step": 8455 }, { "epoch": 15.602031394275162, "grad_norm": 0.18869224190711975, "learning_rate": 2.5359000000000002e-05, "loss": 0.0083, "step": 8456 }, { "epoch": 15.603878116343491, "grad_norm": 0.39934617280960083, "learning_rate": 2.5362000000000002e-05, "loss": 0.0146, "step": 8457 }, { "epoch": 15.605724838411819, "grad_norm": 0.4403945803642273, "learning_rate": 2.5365000000000002e-05, "loss": 0.0167, "step": 8458 }, { "epoch": 15.607571560480148, "grad_norm": 0.39084070920944214, "learning_rate": 2.5368000000000002e-05, "loss": 0.0118, "step": 8459 }, { "epoch": 15.609418282548477, "grad_norm": 0.7717267870903015, "learning_rate": 2.5371e-05, "loss": 0.0245, "step": 8460 }, { "epoch": 15.611265004616806, "grad_norm": 0.38657015562057495, "learning_rate": 2.5374e-05, "loss": 0.0112, "step": 8461 }, { "epoch": 15.613111726685133, "grad_norm": 0.28539496660232544, "learning_rate": 2.5377e-05, "loss": 0.0134, "step": 8462 }, { "epoch": 15.614958448753463, "grad_norm": 0.3543245494365692, "learning_rate": 2.538e-05, "loss": 0.0148, "step": 8463 }, { "epoch": 15.616805170821792, "grad_norm": 0.3170192241668701, "learning_rate": 2.5383e-05, "loss": 0.0177, "step": 8464 }, { "epoch": 15.61865189289012, "grad_norm": 0.5334312915802002, "learning_rate": 2.5386e-05, "loss": 0.0139, "step": 8465 }, { "epoch": 15.620498614958448, "grad_norm": 0.700171947479248, "learning_rate": 
2.5389000000000004e-05, "loss": 0.0322, "step": 8466 }, { "epoch": 15.622345337026777, "grad_norm": 0.6469569206237793, "learning_rate": 2.5392000000000004e-05, "loss": 0.0183, "step": 8467 }, { "epoch": 15.624192059095106, "grad_norm": 0.9562910199165344, "learning_rate": 2.5395e-05, "loss": 0.0204, "step": 8468 }, { "epoch": 15.626038781163436, "grad_norm": 0.4306713938713074, "learning_rate": 2.5398e-05, "loss": 0.012, "step": 8469 }, { "epoch": 15.627885503231763, "grad_norm": 1.4701281785964966, "learning_rate": 2.5401e-05, "loss": 0.0265, "step": 8470 }, { "epoch": 15.629732225300092, "grad_norm": 0.3720110058784485, "learning_rate": 2.5404e-05, "loss": 0.0154, "step": 8471 }, { "epoch": 15.631578947368421, "grad_norm": 0.4950038492679596, "learning_rate": 2.5407e-05, "loss": 0.0166, "step": 8472 }, { "epoch": 15.63342566943675, "grad_norm": 0.5569928288459778, "learning_rate": 2.541e-05, "loss": 0.0178, "step": 8473 }, { "epoch": 15.635272391505078, "grad_norm": 0.33441534638404846, "learning_rate": 2.5413e-05, "loss": 0.0121, "step": 8474 }, { "epoch": 15.637119113573407, "grad_norm": 0.2959272563457489, "learning_rate": 2.5416e-05, "loss": 0.0163, "step": 8475 }, { "epoch": 15.638965835641736, "grad_norm": 0.4156274199485779, "learning_rate": 2.5419000000000002e-05, "loss": 0.0141, "step": 8476 }, { "epoch": 15.640812557710065, "grad_norm": 0.4267812669277191, "learning_rate": 2.5422e-05, "loss": 0.0313, "step": 8477 }, { "epoch": 15.642659279778393, "grad_norm": 0.5098170042037964, "learning_rate": 2.5425e-05, "loss": 0.0209, "step": 8478 }, { "epoch": 15.644506001846722, "grad_norm": 0.3128315806388855, "learning_rate": 2.5428e-05, "loss": 0.014, "step": 8479 }, { "epoch": 15.64635272391505, "grad_norm": 1.0015007257461548, "learning_rate": 2.5431e-05, "loss": 0.0429, "step": 8480 }, { "epoch": 15.64819944598338, "grad_norm": 1.4465745687484741, "learning_rate": 2.5434e-05, "loss": 0.2054, "step": 8481 }, { "epoch": 15.65004616805171, "grad_norm": 
0.8323967456817627, "learning_rate": 2.5437e-05, "loss": 0.2016, "step": 8482 }, { "epoch": 15.651892890120036, "grad_norm": 0.5159797668457031, "learning_rate": 2.544e-05, "loss": 0.0936, "step": 8483 }, { "epoch": 15.653739612188366, "grad_norm": 0.5815988183021545, "learning_rate": 2.5443e-05, "loss": 0.1431, "step": 8484 }, { "epoch": 15.655586334256695, "grad_norm": 0.6828600168228149, "learning_rate": 2.5446e-05, "loss": 0.1107, "step": 8485 }, { "epoch": 15.657433056325024, "grad_norm": 0.6418094635009766, "learning_rate": 2.5449000000000003e-05, "loss": 0.1151, "step": 8486 }, { "epoch": 15.659279778393351, "grad_norm": 0.5052107572555542, "learning_rate": 2.5452000000000003e-05, "loss": 0.1149, "step": 8487 }, { "epoch": 15.66112650046168, "grad_norm": 0.4315268099308014, "learning_rate": 2.5455000000000003e-05, "loss": 0.1143, "step": 8488 }, { "epoch": 15.66297322253001, "grad_norm": 0.49266499280929565, "learning_rate": 2.5458000000000003e-05, "loss": 0.0622, "step": 8489 }, { "epoch": 15.664819944598339, "grad_norm": 1.084386944770813, "learning_rate": 2.5461000000000002e-05, "loss": 0.0626, "step": 8490 }, { "epoch": 15.666666666666666, "grad_norm": 0.5839085578918457, "learning_rate": 2.5464000000000002e-05, "loss": 0.0729, "step": 8491 }, { "epoch": 15.668513388734995, "grad_norm": 0.5455254316329956, "learning_rate": 2.5467e-05, "loss": 0.0823, "step": 8492 }, { "epoch": 15.670360110803324, "grad_norm": 1.681470513343811, "learning_rate": 2.547e-05, "loss": 0.1319, "step": 8493 }, { "epoch": 15.672206832871654, "grad_norm": 0.481160432100296, "learning_rate": 2.5472999999999998e-05, "loss": 0.0468, "step": 8494 }, { "epoch": 15.67405355493998, "grad_norm": 0.4013616442680359, "learning_rate": 2.5475999999999998e-05, "loss": 0.0352, "step": 8495 }, { "epoch": 15.67590027700831, "grad_norm": 2.3175511360168457, "learning_rate": 2.5479e-05, "loss": 0.046, "step": 8496 }, { "epoch": 15.67774699907664, "grad_norm": 0.27709218859672546, "learning_rate": 
2.5482e-05, "loss": 0.0217, "step": 8497 }, { "epoch": 15.679593721144968, "grad_norm": 0.4663790762424469, "learning_rate": 2.5485e-05, "loss": 0.0275, "step": 8498 }, { "epoch": 15.681440443213296, "grad_norm": 0.40022438764572144, "learning_rate": 2.5488e-05, "loss": 0.0344, "step": 8499 }, { "epoch": 15.683287165281625, "grad_norm": 0.29696258902549744, "learning_rate": 2.5491e-05, "loss": 0.0146, "step": 8500 }, { "epoch": 15.685133887349954, "grad_norm": 0.22244930267333984, "learning_rate": 2.5494e-05, "loss": 0.0199, "step": 8501 }, { "epoch": 15.686980609418283, "grad_norm": 0.2986993193626404, "learning_rate": 2.5497e-05, "loss": 0.0203, "step": 8502 }, { "epoch": 15.68882733148661, "grad_norm": 0.7852336764335632, "learning_rate": 2.55e-05, "loss": 0.0247, "step": 8503 }, { "epoch": 15.69067405355494, "grad_norm": 0.6283233761787415, "learning_rate": 2.5503e-05, "loss": 0.037, "step": 8504 }, { "epoch": 15.692520775623269, "grad_norm": 0.544381320476532, "learning_rate": 2.5506e-05, "loss": 0.0193, "step": 8505 }, { "epoch": 15.694367497691598, "grad_norm": 0.7549434304237366, "learning_rate": 2.5509e-05, "loss": 0.0537, "step": 8506 }, { "epoch": 15.696214219759927, "grad_norm": 0.407317191362381, "learning_rate": 2.5512000000000002e-05, "loss": 0.0351, "step": 8507 }, { "epoch": 15.698060941828254, "grad_norm": 0.3477088510990143, "learning_rate": 2.5515000000000002e-05, "loss": 0.0156, "step": 8508 }, { "epoch": 15.699907663896584, "grad_norm": 0.2679806351661682, "learning_rate": 2.5518000000000002e-05, "loss": 0.0121, "step": 8509 }, { "epoch": 15.701754385964913, "grad_norm": 0.42538949847221375, "learning_rate": 2.5521000000000002e-05, "loss": 0.013, "step": 8510 }, { "epoch": 15.703601108033242, "grad_norm": 0.5099429488182068, "learning_rate": 2.5524e-05, "loss": 0.0166, "step": 8511 }, { "epoch": 15.70544783010157, "grad_norm": 0.26436755061149597, "learning_rate": 2.5527e-05, "loss": 0.0109, "step": 8512 }, { "epoch": 15.707294552169898, 
"grad_norm": 0.28746193647384644, "learning_rate": 2.553e-05, "loss": 0.0149, "step": 8513 }, { "epoch": 15.709141274238227, "grad_norm": 0.48255273699760437, "learning_rate": 2.5533e-05, "loss": 0.014, "step": 8514 }, { "epoch": 15.710987996306557, "grad_norm": 0.7732357978820801, "learning_rate": 2.5536e-05, "loss": 0.0217, "step": 8515 }, { "epoch": 15.712834718374884, "grad_norm": 0.40901118516921997, "learning_rate": 2.5539e-05, "loss": 0.0213, "step": 8516 }, { "epoch": 15.714681440443213, "grad_norm": 0.4318062365055084, "learning_rate": 2.5542e-05, "loss": 0.0158, "step": 8517 }, { "epoch": 15.716528162511542, "grad_norm": 0.5730619430541992, "learning_rate": 2.5545e-05, "loss": 0.0102, "step": 8518 }, { "epoch": 15.718374884579871, "grad_norm": 0.5702316761016846, "learning_rate": 2.5548e-05, "loss": 0.0291, "step": 8519 }, { "epoch": 15.720221606648199, "grad_norm": 0.4010271728038788, "learning_rate": 2.5551e-05, "loss": 0.016, "step": 8520 }, { "epoch": 15.722068328716528, "grad_norm": 0.5954778790473938, "learning_rate": 2.5554e-05, "loss": 0.0295, "step": 8521 }, { "epoch": 15.723915050784857, "grad_norm": 0.2628062963485718, "learning_rate": 2.5557e-05, "loss": 0.0097, "step": 8522 }, { "epoch": 15.725761772853186, "grad_norm": 0.8188832998275757, "learning_rate": 2.556e-05, "loss": 0.0128, "step": 8523 }, { "epoch": 15.727608494921514, "grad_norm": 0.6020427346229553, "learning_rate": 2.5563e-05, "loss": 0.0184, "step": 8524 }, { "epoch": 15.729455216989843, "grad_norm": 0.31666770577430725, "learning_rate": 2.5566e-05, "loss": 0.0136, "step": 8525 }, { "epoch": 15.731301939058172, "grad_norm": 0.6543294787406921, "learning_rate": 2.5569e-05, "loss": 0.0218, "step": 8526 }, { "epoch": 15.733148661126501, "grad_norm": 0.3938964903354645, "learning_rate": 2.5572000000000002e-05, "loss": 0.0089, "step": 8527 }, { "epoch": 15.734995383194828, "grad_norm": 0.43330466747283936, "learning_rate": 2.5575e-05, "loss": 0.0204, "step": 8528 }, { "epoch": 
15.736842105263158, "grad_norm": 0.834074079990387, "learning_rate": 2.5578e-05, "loss": 0.02, "step": 8529 }, { "epoch": 15.738688827331487, "grad_norm": 1.3514752388000488, "learning_rate": 2.5581e-05, "loss": 0.0343, "step": 8530 }, { "epoch": 15.740535549399816, "grad_norm": 1.174109935760498, "learning_rate": 2.5584e-05, "loss": 0.2363, "step": 8531 }, { "epoch": 15.742382271468145, "grad_norm": 0.7513344883918762, "learning_rate": 2.5587e-05, "loss": 0.2025, "step": 8532 }, { "epoch": 15.744228993536472, "grad_norm": 0.631834089756012, "learning_rate": 2.559e-05, "loss": 0.1719, "step": 8533 }, { "epoch": 15.746075715604801, "grad_norm": 0.7519591450691223, "learning_rate": 2.5593e-05, "loss": 0.1275, "step": 8534 }, { "epoch": 15.74792243767313, "grad_norm": 0.5671206712722778, "learning_rate": 2.5596e-05, "loss": 0.1259, "step": 8535 }, { "epoch": 15.749769159741458, "grad_norm": 0.6367517113685608, "learning_rate": 2.5599e-05, "loss": 0.1023, "step": 8536 }, { "epoch": 15.751615881809787, "grad_norm": 0.4786352515220642, "learning_rate": 2.5602000000000003e-05, "loss": 0.0637, "step": 8537 }, { "epoch": 15.753462603878116, "grad_norm": 0.5861334204673767, "learning_rate": 2.5605000000000003e-05, "loss": 0.0849, "step": 8538 }, { "epoch": 15.755309325946445, "grad_norm": 0.9659382700920105, "learning_rate": 2.5608000000000003e-05, "loss": 0.1039, "step": 8539 }, { "epoch": 15.757156048014775, "grad_norm": 0.48477768898010254, "learning_rate": 2.5611000000000003e-05, "loss": 0.061, "step": 8540 }, { "epoch": 15.759002770083102, "grad_norm": 0.4459925591945648, "learning_rate": 2.5614000000000002e-05, "loss": 0.0569, "step": 8541 }, { "epoch": 15.760849492151431, "grad_norm": 0.7846724390983582, "learning_rate": 2.5617e-05, "loss": 0.0975, "step": 8542 }, { "epoch": 15.76269621421976, "grad_norm": 0.4942132830619812, "learning_rate": 2.562e-05, "loss": 0.0602, "step": 8543 }, { "epoch": 15.76454293628809, "grad_norm": 0.47594746947288513, "learning_rate": 
2.5623e-05, "loss": 0.0484, "step": 8544 }, { "epoch": 15.766389658356417, "grad_norm": 0.2541988492012024, "learning_rate": 2.5625999999999998e-05, "loss": 0.031, "step": 8545 }, { "epoch": 15.768236380424746, "grad_norm": 0.37014615535736084, "learning_rate": 2.5628999999999998e-05, "loss": 0.0276, "step": 8546 }, { "epoch": 15.770083102493075, "grad_norm": 0.47405698895454407, "learning_rate": 2.5632e-05, "loss": 0.0331, "step": 8547 }, { "epoch": 15.771929824561404, "grad_norm": 0.3453981578350067, "learning_rate": 2.5635e-05, "loss": 0.0446, "step": 8548 }, { "epoch": 15.773776546629731, "grad_norm": 0.5440641045570374, "learning_rate": 2.5638e-05, "loss": 0.0215, "step": 8549 }, { "epoch": 15.77562326869806, "grad_norm": 0.20974309742450714, "learning_rate": 2.5641e-05, "loss": 0.0206, "step": 8550 }, { "epoch": 15.77746999076639, "grad_norm": 0.6189224123954773, "learning_rate": 2.5644e-05, "loss": 0.028, "step": 8551 }, { "epoch": 15.779316712834719, "grad_norm": 0.48690396547317505, "learning_rate": 2.5647e-05, "loss": 0.0184, "step": 8552 }, { "epoch": 15.781163434903046, "grad_norm": 0.593185544013977, "learning_rate": 2.565e-05, "loss": 0.025, "step": 8553 }, { "epoch": 15.783010156971375, "grad_norm": 1.684084415435791, "learning_rate": 2.5653e-05, "loss": 0.0218, "step": 8554 }, { "epoch": 15.784856879039705, "grad_norm": 0.21022409200668335, "learning_rate": 2.5656e-05, "loss": 0.0134, "step": 8555 }, { "epoch": 15.786703601108034, "grad_norm": 0.37620681524276733, "learning_rate": 2.5659e-05, "loss": 0.0152, "step": 8556 }, { "epoch": 15.788550323176363, "grad_norm": 0.2804875671863556, "learning_rate": 2.5662000000000003e-05, "loss": 0.0231, "step": 8557 }, { "epoch": 15.79039704524469, "grad_norm": 0.25551798939704895, "learning_rate": 2.5665000000000002e-05, "loss": 0.0104, "step": 8558 }, { "epoch": 15.79224376731302, "grad_norm": 0.3816471993923187, "learning_rate": 2.5668000000000002e-05, "loss": 0.0161, "step": 8559 }, { "epoch": 
15.794090489381349, "grad_norm": 0.6349307298660278, "learning_rate": 2.5671000000000002e-05, "loss": 0.0188, "step": 8560 }, { "epoch": 15.795937211449676, "grad_norm": 0.37047889828681946, "learning_rate": 2.5674000000000002e-05, "loss": 0.041, "step": 8561 }, { "epoch": 15.797783933518005, "grad_norm": 0.6681861877441406, "learning_rate": 2.5677e-05, "loss": 0.0179, "step": 8562 }, { "epoch": 15.799630655586334, "grad_norm": 0.661729633808136, "learning_rate": 2.568e-05, "loss": 0.0259, "step": 8563 }, { "epoch": 15.801477377654663, "grad_norm": 0.3159634470939636, "learning_rate": 2.5683e-05, "loss": 0.0194, "step": 8564 }, { "epoch": 15.803324099722992, "grad_norm": 0.30529919266700745, "learning_rate": 2.5686e-05, "loss": 0.0103, "step": 8565 }, { "epoch": 15.80517082179132, "grad_norm": 0.4781964421272278, "learning_rate": 2.5688999999999997e-05, "loss": 0.0195, "step": 8566 }, { "epoch": 15.807017543859649, "grad_norm": 0.3040353059768677, "learning_rate": 2.5692e-05, "loss": 0.0169, "step": 8567 }, { "epoch": 15.808864265927978, "grad_norm": 0.6087676882743835, "learning_rate": 2.5695e-05, "loss": 0.0193, "step": 8568 }, { "epoch": 15.810710987996307, "grad_norm": 0.5178345441818237, "learning_rate": 2.5698e-05, "loss": 0.0268, "step": 8569 }, { "epoch": 15.812557710064635, "grad_norm": 0.3992609679698944, "learning_rate": 2.5701e-05, "loss": 0.0255, "step": 8570 }, { "epoch": 15.814404432132964, "grad_norm": 0.404962956905365, "learning_rate": 2.5704e-05, "loss": 0.0186, "step": 8571 }, { "epoch": 15.816251154201293, "grad_norm": 0.4101698696613312, "learning_rate": 2.5707e-05, "loss": 0.0143, "step": 8572 }, { "epoch": 15.818097876269622, "grad_norm": 0.34589701890945435, "learning_rate": 2.571e-05, "loss": 0.0072, "step": 8573 }, { "epoch": 15.81994459833795, "grad_norm": 0.33388593792915344, "learning_rate": 2.5713e-05, "loss": 0.0127, "step": 8574 }, { "epoch": 15.821791320406279, "grad_norm": 0.5369555354118347, "learning_rate": 2.5716e-05, "loss": 
0.0251, "step": 8575 }, { "epoch": 15.823638042474608, "grad_norm": 0.7632960677146912, "learning_rate": 2.5719e-05, "loss": 0.0163, "step": 8576 }, { "epoch": 15.825484764542937, "grad_norm": 0.38925424218177795, "learning_rate": 2.5722000000000002e-05, "loss": 0.013, "step": 8577 }, { "epoch": 15.827331486611264, "grad_norm": 0.43634846806526184, "learning_rate": 2.5725000000000002e-05, "loss": 0.026, "step": 8578 }, { "epoch": 15.829178208679593, "grad_norm": 0.60167396068573, "learning_rate": 2.5728e-05, "loss": 0.0209, "step": 8579 }, { "epoch": 15.831024930747922, "grad_norm": 2.016740322113037, "learning_rate": 2.5731e-05, "loss": 0.0264, "step": 8580 }, { "epoch": 15.832871652816252, "grad_norm": 0.6113871932029724, "learning_rate": 2.5734e-05, "loss": 0.1981, "step": 8581 }, { "epoch": 15.83471837488458, "grad_norm": 0.5366085767745972, "learning_rate": 2.5737e-05, "loss": 0.1967, "step": 8582 }, { "epoch": 15.836565096952908, "grad_norm": 0.720666229724884, "learning_rate": 2.574e-05, "loss": 0.1343, "step": 8583 }, { "epoch": 15.838411819021237, "grad_norm": 0.5490646958351135, "learning_rate": 2.5743e-05, "loss": 0.1563, "step": 8584 }, { "epoch": 15.840258541089566, "grad_norm": 1.0201573371887207, "learning_rate": 2.5746e-05, "loss": 0.1194, "step": 8585 }, { "epoch": 15.842105263157894, "grad_norm": 0.49989888072013855, "learning_rate": 2.5749e-05, "loss": 0.1354, "step": 8586 }, { "epoch": 15.843951985226223, "grad_norm": 0.3904455602169037, "learning_rate": 2.5752000000000003e-05, "loss": 0.0918, "step": 8587 }, { "epoch": 15.845798707294552, "grad_norm": 0.5409448146820068, "learning_rate": 2.5755000000000003e-05, "loss": 0.0916, "step": 8588 }, { "epoch": 15.847645429362881, "grad_norm": 0.6536295413970947, "learning_rate": 2.5758000000000003e-05, "loss": 0.113, "step": 8589 }, { "epoch": 15.84949215143121, "grad_norm": 0.5758427381515503, "learning_rate": 2.5761000000000003e-05, "loss": 0.0632, "step": 8590 }, { "epoch": 15.851338873499538, 
"grad_norm": 0.7237721085548401, "learning_rate": 2.5764e-05, "loss": 0.0726, "step": 8591 }, { "epoch": 15.853185595567867, "grad_norm": 0.30059942603111267, "learning_rate": 2.5767e-05, "loss": 0.0357, "step": 8592 }, { "epoch": 15.855032317636196, "grad_norm": 0.26866260170936584, "learning_rate": 2.577e-05, "loss": 0.0456, "step": 8593 }, { "epoch": 15.856879039704525, "grad_norm": 0.506643533706665, "learning_rate": 2.5773e-05, "loss": 0.0887, "step": 8594 }, { "epoch": 15.858725761772853, "grad_norm": 0.31971853971481323, "learning_rate": 2.5776e-05, "loss": 0.0334, "step": 8595 }, { "epoch": 15.860572483841182, "grad_norm": 0.30274417996406555, "learning_rate": 2.5779e-05, "loss": 0.045, "step": 8596 }, { "epoch": 15.86241920590951, "grad_norm": 0.44081827998161316, "learning_rate": 2.5782e-05, "loss": 0.0305, "step": 8597 }, { "epoch": 15.86426592797784, "grad_norm": 0.6209914684295654, "learning_rate": 2.5785e-05, "loss": 0.0405, "step": 8598 }, { "epoch": 15.866112650046167, "grad_norm": 1.5701161623001099, "learning_rate": 2.5788e-05, "loss": 0.0168, "step": 8599 }, { "epoch": 15.867959372114496, "grad_norm": 0.6633787155151367, "learning_rate": 2.5791e-05, "loss": 0.0203, "step": 8600 }, { "epoch": 15.869806094182826, "grad_norm": 0.3315002918243408, "learning_rate": 2.5794e-05, "loss": 0.0113, "step": 8601 }, { "epoch": 15.871652816251155, "grad_norm": 0.44118741154670715, "learning_rate": 2.5797e-05, "loss": 0.0359, "step": 8602 }, { "epoch": 15.873499538319482, "grad_norm": 0.8586814999580383, "learning_rate": 2.58e-05, "loss": 0.0212, "step": 8603 }, { "epoch": 15.875346260387811, "grad_norm": 0.6964927911758423, "learning_rate": 2.5803e-05, "loss": 0.0188, "step": 8604 }, { "epoch": 15.87719298245614, "grad_norm": 0.4353424608707428, "learning_rate": 2.5806e-05, "loss": 0.0134, "step": 8605 }, { "epoch": 15.87903970452447, "grad_norm": 0.43677806854248047, "learning_rate": 2.5809e-05, "loss": 0.0218, "step": 8606 }, { "epoch": 15.880886426592799, 
"grad_norm": 0.41412249207496643, "learning_rate": 2.5812000000000003e-05, "loss": 0.0115, "step": 8607 }, { "epoch": 15.882733148661126, "grad_norm": 0.3291482627391815, "learning_rate": 2.5815000000000003e-05, "loss": 0.0131, "step": 8608 }, { "epoch": 15.884579870729455, "grad_norm": 0.683460533618927, "learning_rate": 2.5818000000000003e-05, "loss": 0.0141, "step": 8609 }, { "epoch": 15.886426592797784, "grad_norm": 0.6479490995407104, "learning_rate": 2.5821000000000002e-05, "loss": 0.0231, "step": 8610 }, { "epoch": 15.888273314866112, "grad_norm": 0.259980171918869, "learning_rate": 2.5824000000000002e-05, "loss": 0.0112, "step": 8611 }, { "epoch": 15.89012003693444, "grad_norm": 0.2572813630104065, "learning_rate": 2.5827000000000002e-05, "loss": 0.0096, "step": 8612 }, { "epoch": 15.89196675900277, "grad_norm": 0.45563334226608276, "learning_rate": 2.5830000000000002e-05, "loss": 0.0119, "step": 8613 }, { "epoch": 15.8938134810711, "grad_norm": 0.31853538751602173, "learning_rate": 2.5833e-05, "loss": 0.0119, "step": 8614 }, { "epoch": 15.895660203139428, "grad_norm": 0.2511363923549652, "learning_rate": 2.5835999999999998e-05, "loss": 0.014, "step": 8615 }, { "epoch": 15.897506925207756, "grad_norm": 0.24003857374191284, "learning_rate": 2.5838999999999998e-05, "loss": 0.0103, "step": 8616 }, { "epoch": 15.899353647276085, "grad_norm": 0.3936110734939575, "learning_rate": 2.5842e-05, "loss": 0.0221, "step": 8617 }, { "epoch": 15.901200369344414, "grad_norm": 0.519484281539917, "learning_rate": 2.5845e-05, "loss": 0.0199, "step": 8618 }, { "epoch": 15.903047091412743, "grad_norm": 0.9927149415016174, "learning_rate": 2.5848e-05, "loss": 0.022, "step": 8619 }, { "epoch": 15.90489381348107, "grad_norm": 0.8863538503646851, "learning_rate": 2.5851e-05, "loss": 0.0233, "step": 8620 }, { "epoch": 15.9067405355494, "grad_norm": 0.2927561402320862, "learning_rate": 2.5854e-05, "loss": 0.012, "step": 8621 }, { "epoch": 15.908587257617729, "grad_norm": 
0.6133517026901245, "learning_rate": 2.5857e-05, "loss": 0.0304, "step": 8622 }, { "epoch": 15.910433979686058, "grad_norm": 0.5118392109870911, "learning_rate": 2.586e-05, "loss": 0.0228, "step": 8623 }, { "epoch": 15.912280701754385, "grad_norm": 0.42478421330451965, "learning_rate": 2.5863e-05, "loss": 0.0119, "step": 8624 }, { "epoch": 15.914127423822714, "grad_norm": 0.46227601170539856, "learning_rate": 2.5866e-05, "loss": 0.0203, "step": 8625 }, { "epoch": 15.915974145891044, "grad_norm": 0.6137253642082214, "learning_rate": 2.5869e-05, "loss": 0.014, "step": 8626 }, { "epoch": 15.917820867959373, "grad_norm": 0.7873435616493225, "learning_rate": 2.5872000000000002e-05, "loss": 0.0196, "step": 8627 }, { "epoch": 15.9196675900277, "grad_norm": 1.0679621696472168, "learning_rate": 2.5875000000000002e-05, "loss": 0.0282, "step": 8628 }, { "epoch": 15.92151431209603, "grad_norm": 0.3143375813961029, "learning_rate": 2.5878000000000002e-05, "loss": 0.0101, "step": 8629 }, { "epoch": 15.923361034164358, "grad_norm": 1.0865501165390015, "learning_rate": 2.5881000000000002e-05, "loss": 0.0225, "step": 8630 }, { "epoch": 15.925207756232687, "grad_norm": 1.2255518436431885, "learning_rate": 2.5884e-05, "loss": 0.2034, "step": 8631 }, { "epoch": 15.927054478301017, "grad_norm": 0.8532062768936157, "learning_rate": 2.5887e-05, "loss": 0.1659, "step": 8632 }, { "epoch": 15.928901200369344, "grad_norm": 0.46837377548217773, "learning_rate": 2.589e-05, "loss": 0.1113, "step": 8633 }, { "epoch": 15.930747922437673, "grad_norm": 0.45477795600891113, "learning_rate": 2.5893e-05, "loss": 0.1057, "step": 8634 }, { "epoch": 15.932594644506002, "grad_norm": 0.715966522693634, "learning_rate": 2.5896e-05, "loss": 0.1335, "step": 8635 }, { "epoch": 15.93444136657433, "grad_norm": 0.5230908989906311, "learning_rate": 2.5899e-05, "loss": 0.0844, "step": 8636 }, { "epoch": 15.936288088642659, "grad_norm": 0.49790942668914795, "learning_rate": 2.5902e-05, "loss": 0.0792, "step": 8637 
}, { "epoch": 15.938134810710988, "grad_norm": 0.6336291432380676, "learning_rate": 2.5905000000000004e-05, "loss": 0.0816, "step": 8638 }, { "epoch": 15.939981532779317, "grad_norm": 0.4619077444076538, "learning_rate": 2.5908000000000003e-05, "loss": 0.0695, "step": 8639 }, { "epoch": 15.941828254847646, "grad_norm": 0.4522983431816101, "learning_rate": 2.5911e-05, "loss": 0.0567, "step": 8640 }, { "epoch": 15.943674976915974, "grad_norm": 0.4374760687351227, "learning_rate": 2.5914e-05, "loss": 0.0483, "step": 8641 }, { "epoch": 15.945521698984303, "grad_norm": 0.36170950531959534, "learning_rate": 2.5917e-05, "loss": 0.0285, "step": 8642 }, { "epoch": 15.947368421052632, "grad_norm": 0.39567574858665466, "learning_rate": 2.592e-05, "loss": 0.0634, "step": 8643 }, { "epoch": 15.949215143120961, "grad_norm": 0.4324873089790344, "learning_rate": 2.5923e-05, "loss": 0.0238, "step": 8644 }, { "epoch": 15.951061865189288, "grad_norm": 0.4158211648464203, "learning_rate": 2.5926e-05, "loss": 0.0155, "step": 8645 }, { "epoch": 15.952908587257618, "grad_norm": 0.45500287413597107, "learning_rate": 2.5929e-05, "loss": 0.0335, "step": 8646 }, { "epoch": 15.954755309325947, "grad_norm": 0.3046945035457611, "learning_rate": 2.5932e-05, "loss": 0.0179, "step": 8647 }, { "epoch": 15.956602031394276, "grad_norm": 0.4032018780708313, "learning_rate": 2.5935e-05, "loss": 0.0238, "step": 8648 }, { "epoch": 15.958448753462603, "grad_norm": 0.3440537750720978, "learning_rate": 2.5938e-05, "loss": 0.0231, "step": 8649 }, { "epoch": 15.960295475530932, "grad_norm": 0.5092036724090576, "learning_rate": 2.5941e-05, "loss": 0.0209, "step": 8650 }, { "epoch": 15.962142197599261, "grad_norm": 0.3283311128616333, "learning_rate": 2.5944e-05, "loss": 0.0182, "step": 8651 }, { "epoch": 15.96398891966759, "grad_norm": 0.3121730387210846, "learning_rate": 2.5947e-05, "loss": 0.0148, "step": 8652 }, { "epoch": 15.965835641735918, "grad_norm": 0.9534308314323425, "learning_rate": 2.595e-05, 
"loss": 0.0242, "step": 8653 }, { "epoch": 15.967682363804247, "grad_norm": 0.47783422470092773, "learning_rate": 2.5953e-05, "loss": 0.0173, "step": 8654 }, { "epoch": 15.969529085872576, "grad_norm": 0.43015190958976746, "learning_rate": 2.5956e-05, "loss": 0.0154, "step": 8655 }, { "epoch": 15.971375807940905, "grad_norm": 0.25951239466667175, "learning_rate": 2.5959e-05, "loss": 0.0139, "step": 8656 }, { "epoch": 15.973222530009235, "grad_norm": 0.45808809995651245, "learning_rate": 2.5962e-05, "loss": 0.0122, "step": 8657 }, { "epoch": 15.975069252077562, "grad_norm": 0.4710130989551544, "learning_rate": 2.5965000000000003e-05, "loss": 0.0117, "step": 8658 }, { "epoch": 15.976915974145891, "grad_norm": 0.3743680715560913, "learning_rate": 2.5968000000000003e-05, "loss": 0.0205, "step": 8659 }, { "epoch": 15.97876269621422, "grad_norm": 0.24802172183990479, "learning_rate": 2.5971000000000003e-05, "loss": 0.0124, "step": 8660 }, { "epoch": 15.980609418282548, "grad_norm": 0.5540706515312195, "learning_rate": 2.5974000000000002e-05, "loss": 0.018, "step": 8661 }, { "epoch": 15.982456140350877, "grad_norm": 0.45025646686553955, "learning_rate": 2.5977000000000002e-05, "loss": 0.0117, "step": 8662 }, { "epoch": 15.984302862419206, "grad_norm": 0.5681506991386414, "learning_rate": 2.5980000000000002e-05, "loss": 0.0216, "step": 8663 }, { "epoch": 15.986149584487535, "grad_norm": 0.23212771117687225, "learning_rate": 2.5983000000000002e-05, "loss": 0.007, "step": 8664 }, { "epoch": 15.987996306555864, "grad_norm": 0.7229043841362, "learning_rate": 2.5985999999999998e-05, "loss": 0.021, "step": 8665 }, { "epoch": 15.989843028624191, "grad_norm": 0.3772416412830353, "learning_rate": 2.5988999999999998e-05, "loss": 0.0144, "step": 8666 }, { "epoch": 15.99168975069252, "grad_norm": 0.4555790424346924, "learning_rate": 2.5991999999999998e-05, "loss": 0.0222, "step": 8667 }, { "epoch": 15.99353647276085, "grad_norm": 0.5141981244087219, "learning_rate": 2.5995e-05, 
"loss": 0.0156, "step": 8668 }, { "epoch": 15.995383194829179, "grad_norm": 0.7101907730102539, "learning_rate": 2.5998e-05, "loss": 0.0207, "step": 8669 }, { "epoch": 15.997229916897506, "grad_norm": 0.2490791529417038, "learning_rate": 2.6001e-05, "loss": 0.0159, "step": 8670 }, { "epoch": 15.999076638965835, "grad_norm": 0.40662047266960144, "learning_rate": 2.6004e-05, "loss": 0.0144, "step": 8671 }, { "epoch": 16.0, "grad_norm": 0.4492111802101135, "learning_rate": 2.6007e-05, "loss": 0.0078, "step": 8672 }, { "epoch": 16.00184672206833, "grad_norm": 0.644540548324585, "learning_rate": 2.601e-05, "loss": 0.2018, "step": 8673 }, { "epoch": 16.00369344413666, "grad_norm": 0.6267561316490173, "learning_rate": 2.6013e-05, "loss": 0.1662, "step": 8674 }, { "epoch": 16.005540166204987, "grad_norm": 0.4369864761829376, "learning_rate": 2.6016e-05, "loss": 0.1051, "step": 8675 }, { "epoch": 16.007386888273317, "grad_norm": 0.6001611948013306, "learning_rate": 2.6019e-05, "loss": 0.119, "step": 8676 }, { "epoch": 16.009233610341642, "grad_norm": 0.46866223216056824, "learning_rate": 2.6022e-05, "loss": 0.0915, "step": 8677 }, { "epoch": 16.01108033240997, "grad_norm": 0.42183881998062134, "learning_rate": 2.6025000000000002e-05, "loss": 0.0853, "step": 8678 }, { "epoch": 16.0129270544783, "grad_norm": 0.6449214816093445, "learning_rate": 2.6028000000000002e-05, "loss": 0.1073, "step": 8679 }, { "epoch": 16.01477377654663, "grad_norm": 0.7335696816444397, "learning_rate": 2.6031000000000002e-05, "loss": 0.0924, "step": 8680 }, { "epoch": 16.01662049861496, "grad_norm": 0.7378032803535461, "learning_rate": 2.6034000000000002e-05, "loss": 0.1016, "step": 8681 }, { "epoch": 16.018467220683288, "grad_norm": 0.6013514399528503, "learning_rate": 2.6037e-05, "loss": 0.045, "step": 8682 }, { "epoch": 16.020313942751617, "grad_norm": 0.36441826820373535, "learning_rate": 2.604e-05, "loss": 0.0536, "step": 8683 }, { "epoch": 16.022160664819946, "grad_norm": 0.41581907868385315, 
"learning_rate": 2.6043e-05, "loss": 0.0506, "step": 8684 }, { "epoch": 16.02400738688827, "grad_norm": 0.29539382457733154, "learning_rate": 2.6046e-05, "loss": 0.0389, "step": 8685 }, { "epoch": 16.0258541089566, "grad_norm": 0.45296069979667664, "learning_rate": 2.6049e-05, "loss": 0.0536, "step": 8686 }, { "epoch": 16.02770083102493, "grad_norm": 0.5271998643875122, "learning_rate": 2.6052e-05, "loss": 0.0438, "step": 8687 }, { "epoch": 16.02954755309326, "grad_norm": 0.3643032908439636, "learning_rate": 2.6055000000000004e-05, "loss": 0.0438, "step": 8688 }, { "epoch": 16.03139427516159, "grad_norm": 0.3770063817501068, "learning_rate": 2.6058e-05, "loss": 0.0265, "step": 8689 }, { "epoch": 16.033240997229917, "grad_norm": 0.33927151560783386, "learning_rate": 2.6061e-05, "loss": 0.0203, "step": 8690 }, { "epoch": 16.035087719298247, "grad_norm": 0.34185677766799927, "learning_rate": 2.6064e-05, "loss": 0.0275, "step": 8691 }, { "epoch": 16.036934441366576, "grad_norm": 0.26765990257263184, "learning_rate": 2.6067e-05, "loss": 0.035, "step": 8692 }, { "epoch": 16.0387811634349, "grad_norm": 0.64670729637146, "learning_rate": 2.607e-05, "loss": 0.0165, "step": 8693 }, { "epoch": 16.04062788550323, "grad_norm": 0.32306674122810364, "learning_rate": 2.6073e-05, "loss": 0.0191, "step": 8694 }, { "epoch": 16.04247460757156, "grad_norm": 0.283189982175827, "learning_rate": 2.6076e-05, "loss": 0.0151, "step": 8695 }, { "epoch": 16.04432132963989, "grad_norm": 0.21966862678527832, "learning_rate": 2.6079e-05, "loss": 0.0103, "step": 8696 }, { "epoch": 16.046168051708218, "grad_norm": 0.30473917722702026, "learning_rate": 2.6082e-05, "loss": 0.0099, "step": 8697 }, { "epoch": 16.048014773776547, "grad_norm": 0.2754177451133728, "learning_rate": 2.6085000000000002e-05, "loss": 0.0066, "step": 8698 }, { "epoch": 16.049861495844876, "grad_norm": 0.2849455773830414, "learning_rate": 2.6088e-05, "loss": 0.0134, "step": 8699 }, { "epoch": 16.051708217913205, "grad_norm": 
0.519327700138092, "learning_rate": 2.6091e-05, "loss": 0.0099, "step": 8700 }, { "epoch": 16.053554939981534, "grad_norm": 0.5305798649787903, "learning_rate": 2.6094e-05, "loss": 0.0102, "step": 8701 }, { "epoch": 16.05540166204986, "grad_norm": 0.7305449843406677, "learning_rate": 2.6097e-05, "loss": 0.0127, "step": 8702 }, { "epoch": 16.05724838411819, "grad_norm": 0.3311886787414551, "learning_rate": 2.61e-05, "loss": 0.0167, "step": 8703 }, { "epoch": 16.05909510618652, "grad_norm": 0.20189516246318817, "learning_rate": 2.6103e-05, "loss": 0.0069, "step": 8704 }, { "epoch": 16.060941828254848, "grad_norm": 0.2906367778778076, "learning_rate": 2.6106e-05, "loss": 0.0057, "step": 8705 }, { "epoch": 16.062788550323177, "grad_norm": 0.3915349543094635, "learning_rate": 2.6109e-05, "loss": 0.0093, "step": 8706 }, { "epoch": 16.064635272391506, "grad_norm": 0.3892596364021301, "learning_rate": 2.6112e-05, "loss": 0.0176, "step": 8707 }, { "epoch": 16.066481994459835, "grad_norm": 0.5112737417221069, "learning_rate": 2.6115000000000003e-05, "loss": 0.0191, "step": 8708 }, { "epoch": 16.068328716528164, "grad_norm": 0.5281062722206116, "learning_rate": 2.6118000000000003e-05, "loss": 0.0106, "step": 8709 }, { "epoch": 16.07017543859649, "grad_norm": 0.7477819919586182, "learning_rate": 2.6121000000000003e-05, "loss": 0.0182, "step": 8710 }, { "epoch": 16.07202216066482, "grad_norm": 0.43619078397750854, "learning_rate": 2.6124000000000003e-05, "loss": 0.0159, "step": 8711 }, { "epoch": 16.073868882733148, "grad_norm": 0.44052207469940186, "learning_rate": 2.6127000000000002e-05, "loss": 0.0111, "step": 8712 }, { "epoch": 16.075715604801477, "grad_norm": 0.3810107111930847, "learning_rate": 2.6130000000000002e-05, "loss": 0.0176, "step": 8713 }, { "epoch": 16.077562326869806, "grad_norm": 0.2601052224636078, "learning_rate": 2.6133e-05, "loss": 0.0149, "step": 8714 }, { "epoch": 16.079409048938135, "grad_norm": 0.40293899178504944, "learning_rate": 2.6136e-05, "loss": 
0.0082, "step": 8715 }, { "epoch": 16.081255771006465, "grad_norm": 0.4989589750766754, "learning_rate": 2.6138999999999998e-05, "loss": 0.0122, "step": 8716 }, { "epoch": 16.083102493074794, "grad_norm": 0.9980294108390808, "learning_rate": 2.6141999999999998e-05, "loss": 0.0185, "step": 8717 }, { "epoch": 16.08494921514312, "grad_norm": 0.363986611366272, "learning_rate": 2.6145e-05, "loss": 0.0106, "step": 8718 }, { "epoch": 16.08679593721145, "grad_norm": 0.21164149045944214, "learning_rate": 2.6148e-05, "loss": 0.0043, "step": 8719 }, { "epoch": 16.088642659279778, "grad_norm": 0.41821521520614624, "learning_rate": 2.6151e-05, "loss": 0.0127, "step": 8720 }, { "epoch": 16.090489381348107, "grad_norm": 0.3259578049182892, "learning_rate": 2.6154e-05, "loss": 0.0104, "step": 8721 }, { "epoch": 16.092336103416436, "grad_norm": 0.6169150471687317, "learning_rate": 2.6157e-05, "loss": 0.0095, "step": 8722 }, { "epoch": 16.094182825484765, "grad_norm": 0.8425424695014954, "learning_rate": 2.616e-05, "loss": 0.2042, "step": 8723 }, { "epoch": 16.096029547553094, "grad_norm": 0.7024513483047485, "learning_rate": 2.6163e-05, "loss": 0.1881, "step": 8724 }, { "epoch": 16.097876269621423, "grad_norm": 0.7537822723388672, "learning_rate": 2.6166e-05, "loss": 0.1639, "step": 8725 }, { "epoch": 16.099722991689752, "grad_norm": 0.47779718041419983, "learning_rate": 2.6169e-05, "loss": 0.1255, "step": 8726 }, { "epoch": 16.101569713758078, "grad_norm": 0.6427140235900879, "learning_rate": 2.6172e-05, "loss": 0.1821, "step": 8727 }, { "epoch": 16.103416435826407, "grad_norm": 0.7359801530838013, "learning_rate": 2.6175000000000003e-05, "loss": 0.0879, "step": 8728 }, { "epoch": 16.105263157894736, "grad_norm": 0.5765436887741089, "learning_rate": 2.6178000000000002e-05, "loss": 0.0853, "step": 8729 }, { "epoch": 16.107109879963065, "grad_norm": 0.7798746824264526, "learning_rate": 2.6181000000000002e-05, "loss": 0.0957, "step": 8730 }, { "epoch": 16.108956602031395, 
"grad_norm": 0.49390435218811035, "learning_rate": 2.6184000000000002e-05, "loss": 0.081, "step": 8731 }, { "epoch": 16.110803324099724, "grad_norm": 0.403861403465271, "learning_rate": 2.6187000000000002e-05, "loss": 0.0444, "step": 8732 }, { "epoch": 16.112650046168053, "grad_norm": 0.43862980604171753, "learning_rate": 2.619e-05, "loss": 0.0694, "step": 8733 }, { "epoch": 16.114496768236382, "grad_norm": 0.6070998311042786, "learning_rate": 2.6193e-05, "loss": 0.049, "step": 8734 }, { "epoch": 16.116343490304708, "grad_norm": 0.32739898562431335, "learning_rate": 2.6196e-05, "loss": 0.0496, "step": 8735 }, { "epoch": 16.118190212373037, "grad_norm": 0.46069347858428955, "learning_rate": 2.6199e-05, "loss": 0.0236, "step": 8736 }, { "epoch": 16.120036934441366, "grad_norm": 0.3739839196205139, "learning_rate": 2.6202e-05, "loss": 0.0266, "step": 8737 }, { "epoch": 16.121883656509695, "grad_norm": 0.27839231491088867, "learning_rate": 2.6205e-05, "loss": 0.0486, "step": 8738 }, { "epoch": 16.123730378578024, "grad_norm": 0.3901972472667694, "learning_rate": 2.6208e-05, "loss": 0.0198, "step": 8739 }, { "epoch": 16.125577100646353, "grad_norm": 0.25915515422821045, "learning_rate": 2.6211e-05, "loss": 0.0236, "step": 8740 }, { "epoch": 16.127423822714682, "grad_norm": 0.29264405369758606, "learning_rate": 2.6214e-05, "loss": 0.0165, "step": 8741 }, { "epoch": 16.12927054478301, "grad_norm": 0.2875673472881317, "learning_rate": 2.6217e-05, "loss": 0.0152, "step": 8742 }, { "epoch": 16.131117266851337, "grad_norm": 0.38900938630104065, "learning_rate": 2.622e-05, "loss": 0.0146, "step": 8743 }, { "epoch": 16.132963988919666, "grad_norm": 0.23090846836566925, "learning_rate": 2.6223e-05, "loss": 0.024, "step": 8744 }, { "epoch": 16.134810710987995, "grad_norm": 0.34287866950035095, "learning_rate": 2.6226e-05, "loss": 0.0101, "step": 8745 }, { "epoch": 16.136657433056325, "grad_norm": 0.398605078458786, "learning_rate": 2.6229e-05, "loss": 0.0152, "step": 8746 }, { 
"epoch": 16.138504155124654, "grad_norm": 0.4421793520450592, "learning_rate": 2.6232e-05, "loss": 0.011, "step": 8747 }, { "epoch": 16.140350877192983, "grad_norm": 0.42159032821655273, "learning_rate": 2.6235000000000002e-05, "loss": 0.0113, "step": 8748 }, { "epoch": 16.142197599261312, "grad_norm": 0.5255663990974426, "learning_rate": 2.6238000000000002e-05, "loss": 0.0111, "step": 8749 }, { "epoch": 16.14404432132964, "grad_norm": 0.593675971031189, "learning_rate": 2.6241e-05, "loss": 0.0276, "step": 8750 }, { "epoch": 16.14589104339797, "grad_norm": 0.4048978090286255, "learning_rate": 2.6244e-05, "loss": 0.0132, "step": 8751 }, { "epoch": 16.147737765466296, "grad_norm": 0.252643883228302, "learning_rate": 2.6247e-05, "loss": 0.0096, "step": 8752 }, { "epoch": 16.149584487534625, "grad_norm": 0.5537533760070801, "learning_rate": 2.625e-05, "loss": 0.0143, "step": 8753 }, { "epoch": 16.151431209602954, "grad_norm": 0.4042913019657135, "learning_rate": 2.6253e-05, "loss": 0.0175, "step": 8754 }, { "epoch": 16.153277931671283, "grad_norm": 0.2454013079404831, "learning_rate": 2.6256e-05, "loss": 0.0095, "step": 8755 }, { "epoch": 16.155124653739612, "grad_norm": 0.3569791913032532, "learning_rate": 2.6259e-05, "loss": 0.0148, "step": 8756 }, { "epoch": 16.15697137580794, "grad_norm": 0.2660028040409088, "learning_rate": 2.6262e-05, "loss": 0.0113, "step": 8757 }, { "epoch": 16.15881809787627, "grad_norm": 0.2081262171268463, "learning_rate": 2.6265e-05, "loss": 0.0132, "step": 8758 }, { "epoch": 16.1606648199446, "grad_norm": 0.42625632882118225, "learning_rate": 2.6268000000000003e-05, "loss": 0.0068, "step": 8759 }, { "epoch": 16.162511542012926, "grad_norm": 0.20090582966804504, "learning_rate": 2.6271000000000003e-05, "loss": 0.0067, "step": 8760 }, { "epoch": 16.164358264081255, "grad_norm": 0.2835046947002411, "learning_rate": 2.6274000000000003e-05, "loss": 0.0099, "step": 8761 }, { "epoch": 16.166204986149584, "grad_norm": 0.8398069739341736, 
"learning_rate": 2.6277000000000003e-05, "loss": 0.0142, "step": 8762 }, { "epoch": 16.168051708217913, "grad_norm": 0.25367045402526855, "learning_rate": 2.628e-05, "loss": 0.0073, "step": 8763 }, { "epoch": 16.169898430286242, "grad_norm": 0.30854055285453796, "learning_rate": 2.6283e-05, "loss": 0.0086, "step": 8764 }, { "epoch": 16.17174515235457, "grad_norm": 0.5980221033096313, "learning_rate": 2.6286e-05, "loss": 0.0124, "step": 8765 }, { "epoch": 16.1735918744229, "grad_norm": 0.38559502363204956, "learning_rate": 2.6289e-05, "loss": 0.0119, "step": 8766 }, { "epoch": 16.17543859649123, "grad_norm": 0.3606363534927368, "learning_rate": 2.6292e-05, "loss": 0.0109, "step": 8767 }, { "epoch": 16.177285318559555, "grad_norm": 0.4450051486492157, "learning_rate": 2.6294999999999998e-05, "loss": 0.0133, "step": 8768 }, { "epoch": 16.179132040627884, "grad_norm": 0.412380576133728, "learning_rate": 2.6298e-05, "loss": 0.0103, "step": 8769 }, { "epoch": 16.180978762696213, "grad_norm": 0.5443732738494873, "learning_rate": 2.6301e-05, "loss": 0.012, "step": 8770 }, { "epoch": 16.182825484764543, "grad_norm": 0.8527624011039734, "learning_rate": 2.6304e-05, "loss": 0.0083, "step": 8771 }, { "epoch": 16.18467220683287, "grad_norm": 0.6248682737350464, "learning_rate": 2.6307e-05, "loss": 0.016, "step": 8772 }, { "epoch": 16.1865189289012, "grad_norm": 0.609694242477417, "learning_rate": 2.631e-05, "loss": 0.2052, "step": 8773 }, { "epoch": 16.18836565096953, "grad_norm": 1.0320067405700684, "learning_rate": 2.6313e-05, "loss": 0.2287, "step": 8774 }, { "epoch": 16.19021237303786, "grad_norm": 0.5504393577575684, "learning_rate": 2.6316e-05, "loss": 0.1232, "step": 8775 }, { "epoch": 16.19205909510619, "grad_norm": 0.5152016878128052, "learning_rate": 2.6319e-05, "loss": 0.1213, "step": 8776 }, { "epoch": 16.193905817174514, "grad_norm": 0.5570312738418579, "learning_rate": 2.6322e-05, "loss": 0.1493, "step": 8777 }, { "epoch": 16.195752539242843, "grad_norm": 
0.4364548623561859, "learning_rate": 2.6325e-05, "loss": 0.1012, "step": 8778 }, { "epoch": 16.197599261311172, "grad_norm": 0.4299827218055725, "learning_rate": 2.6328000000000003e-05, "loss": 0.1013, "step": 8779 }, { "epoch": 16.1994459833795, "grad_norm": 0.4067482054233551, "learning_rate": 2.6331000000000003e-05, "loss": 0.0621, "step": 8780 }, { "epoch": 16.20129270544783, "grad_norm": 0.5066156387329102, "learning_rate": 2.6334000000000002e-05, "loss": 0.0927, "step": 8781 }, { "epoch": 16.20313942751616, "grad_norm": 0.5903967618942261, "learning_rate": 2.6337000000000002e-05, "loss": 0.1129, "step": 8782 }, { "epoch": 16.20498614958449, "grad_norm": 0.3994077146053314, "learning_rate": 2.6340000000000002e-05, "loss": 0.0623, "step": 8783 }, { "epoch": 16.206832871652818, "grad_norm": 0.3687579333782196, "learning_rate": 2.6343000000000002e-05, "loss": 0.0591, "step": 8784 }, { "epoch": 16.208679593721143, "grad_norm": 0.4481465816497803, "learning_rate": 2.6346e-05, "loss": 0.0368, "step": 8785 }, { "epoch": 16.210526315789473, "grad_norm": 0.5493351221084595, "learning_rate": 2.6349e-05, "loss": 0.0353, "step": 8786 }, { "epoch": 16.2123730378578, "grad_norm": 0.47865021228790283, "learning_rate": 2.6351999999999998e-05, "loss": 0.0393, "step": 8787 }, { "epoch": 16.21421975992613, "grad_norm": 0.6238217353820801, "learning_rate": 2.6354999999999998e-05, "loss": 0.0197, "step": 8788 }, { "epoch": 16.21606648199446, "grad_norm": 0.4697655141353607, "learning_rate": 2.6358e-05, "loss": 0.0289, "step": 8789 }, { "epoch": 16.21791320406279, "grad_norm": 0.573111355304718, "learning_rate": 2.6361e-05, "loss": 0.0179, "step": 8790 }, { "epoch": 16.21975992613112, "grad_norm": 0.20533904433250427, "learning_rate": 2.6364e-05, "loss": 0.0134, "step": 8791 }, { "epoch": 16.221606648199447, "grad_norm": 0.37425920367240906, "learning_rate": 2.6367e-05, "loss": 0.0392, "step": 8792 }, { "epoch": 16.223453370267773, "grad_norm": 0.3609694540500641, "learning_rate": 
2.637e-05, "loss": 0.0111, "step": 8793 }, { "epoch": 16.225300092336102, "grad_norm": 0.2295820564031601, "learning_rate": 2.6373e-05, "loss": 0.0086, "step": 8794 }, { "epoch": 16.22714681440443, "grad_norm": 0.43150201439857483, "learning_rate": 2.6376e-05, "loss": 0.015, "step": 8795 }, { "epoch": 16.22899353647276, "grad_norm": 0.2645535469055176, "learning_rate": 2.6379e-05, "loss": 0.0154, "step": 8796 }, { "epoch": 16.23084025854109, "grad_norm": 0.26752784848213196, "learning_rate": 2.6382e-05, "loss": 0.0157, "step": 8797 }, { "epoch": 16.23268698060942, "grad_norm": 0.34007754921913147, "learning_rate": 2.6385e-05, "loss": 0.0194, "step": 8798 }, { "epoch": 16.234533702677748, "grad_norm": 0.29169556498527527, "learning_rate": 2.6388000000000002e-05, "loss": 0.0145, "step": 8799 }, { "epoch": 16.236380424746077, "grad_norm": 0.234590083360672, "learning_rate": 2.6391000000000002e-05, "loss": 0.0115, "step": 8800 }, { "epoch": 16.238227146814406, "grad_norm": 0.684226930141449, "learning_rate": 2.6394000000000002e-05, "loss": 0.0135, "step": 8801 }, { "epoch": 16.24007386888273, "grad_norm": 0.3227801024913788, "learning_rate": 2.6397e-05, "loss": 0.0207, "step": 8802 }, { "epoch": 16.24192059095106, "grad_norm": 0.930953323841095, "learning_rate": 2.64e-05, "loss": 0.0195, "step": 8803 }, { "epoch": 16.24376731301939, "grad_norm": 0.6033096313476562, "learning_rate": 2.6403e-05, "loss": 0.0184, "step": 8804 }, { "epoch": 16.24561403508772, "grad_norm": 0.3899700343608856, "learning_rate": 2.6406e-05, "loss": 0.0099, "step": 8805 }, { "epoch": 16.24746075715605, "grad_norm": 0.3385612666606903, "learning_rate": 2.6409e-05, "loss": 0.0079, "step": 8806 }, { "epoch": 16.249307479224377, "grad_norm": 0.7180120348930359, "learning_rate": 2.6412e-05, "loss": 0.0113, "step": 8807 }, { "epoch": 16.251154201292707, "grad_norm": 0.42312514781951904, "learning_rate": 2.6415e-05, "loss": 0.0125, "step": 8808 }, { "epoch": 16.253000923361036, "grad_norm": 
0.291399747133255, "learning_rate": 2.6418000000000004e-05, "loss": 0.0124, "step": 8809 }, { "epoch": 16.25484764542936, "grad_norm": 0.33661431074142456, "learning_rate": 2.6421000000000003e-05, "loss": 0.0105, "step": 8810 }, { "epoch": 16.25669436749769, "grad_norm": 0.24821096658706665, "learning_rate": 2.6424000000000003e-05, "loss": 0.0066, "step": 8811 }, { "epoch": 16.25854108956602, "grad_norm": 0.3174000680446625, "learning_rate": 2.6427e-05, "loss": 0.0142, "step": 8812 }, { "epoch": 16.26038781163435, "grad_norm": 0.5178481340408325, "learning_rate": 2.643e-05, "loss": 0.0081, "step": 8813 }, { "epoch": 16.262234533702678, "grad_norm": 0.6077439188957214, "learning_rate": 2.6433e-05, "loss": 0.0206, "step": 8814 }, { "epoch": 16.264081255771007, "grad_norm": 0.33663487434387207, "learning_rate": 2.6436e-05, "loss": 0.0069, "step": 8815 }, { "epoch": 16.265927977839336, "grad_norm": 0.5525741577148438, "learning_rate": 2.6439e-05, "loss": 0.0187, "step": 8816 }, { "epoch": 16.267774699907665, "grad_norm": 0.4057387709617615, "learning_rate": 2.6442e-05, "loss": 0.0064, "step": 8817 }, { "epoch": 16.26962142197599, "grad_norm": 0.36393001675605774, "learning_rate": 2.6445e-05, "loss": 0.0158, "step": 8818 }, { "epoch": 16.27146814404432, "grad_norm": 0.3587076663970947, "learning_rate": 2.6448e-05, "loss": 0.0094, "step": 8819 }, { "epoch": 16.27331486611265, "grad_norm": 0.24401932954788208, "learning_rate": 2.6451e-05, "loss": 0.011, "step": 8820 }, { "epoch": 16.27516158818098, "grad_norm": 0.5385698676109314, "learning_rate": 2.6454e-05, "loss": 0.017, "step": 8821 }, { "epoch": 16.277008310249307, "grad_norm": 0.22548331320285797, "learning_rate": 2.6457e-05, "loss": 0.0077, "step": 8822 }, { "epoch": 16.278855032317637, "grad_norm": 0.8914181590080261, "learning_rate": 2.646e-05, "loss": 0.2099, "step": 8823 }, { "epoch": 16.280701754385966, "grad_norm": 0.6201906204223633, "learning_rate": 2.6463e-05, "loss": 0.1274, "step": 8824 }, { "epoch": 
16.282548476454295, "grad_norm": 0.7464452385902405, "learning_rate": 2.6466e-05, "loss": 0.1219, "step": 8825 }, { "epoch": 16.284395198522624, "grad_norm": 0.5396767854690552, "learning_rate": 2.6469e-05, "loss": 0.1038, "step": 8826 }, { "epoch": 16.28624192059095, "grad_norm": 0.7753463983535767, "learning_rate": 2.6472e-05, "loss": 0.1185, "step": 8827 }, { "epoch": 16.28808864265928, "grad_norm": 0.47809740900993347, "learning_rate": 2.6475e-05, "loss": 0.0847, "step": 8828 }, { "epoch": 16.289935364727608, "grad_norm": 0.614112913608551, "learning_rate": 2.6478000000000003e-05, "loss": 0.1215, "step": 8829 }, { "epoch": 16.291782086795937, "grad_norm": 0.6368290781974792, "learning_rate": 2.6481000000000003e-05, "loss": 0.0517, "step": 8830 }, { "epoch": 16.293628808864266, "grad_norm": 0.6140844225883484, "learning_rate": 2.6484000000000003e-05, "loss": 0.0946, "step": 8831 }, { "epoch": 16.295475530932595, "grad_norm": 0.5148943662643433, "learning_rate": 2.6487000000000002e-05, "loss": 0.09, "step": 8832 }, { "epoch": 16.297322253000925, "grad_norm": 0.5340912342071533, "learning_rate": 2.6490000000000002e-05, "loss": 0.0596, "step": 8833 }, { "epoch": 16.299168975069254, "grad_norm": 0.43576526641845703, "learning_rate": 2.6493000000000002e-05, "loss": 0.0451, "step": 8834 }, { "epoch": 16.30101569713758, "grad_norm": 0.5544477105140686, "learning_rate": 2.6496000000000002e-05, "loss": 0.0455, "step": 8835 }, { "epoch": 16.30286241920591, "grad_norm": 0.4253067672252655, "learning_rate": 2.6499e-05, "loss": 0.0667, "step": 8836 }, { "epoch": 16.304709141274238, "grad_norm": 0.36090901494026184, "learning_rate": 2.6501999999999998e-05, "loss": 0.0404, "step": 8837 }, { "epoch": 16.306555863342567, "grad_norm": 0.39279404282569885, "learning_rate": 2.6504999999999998e-05, "loss": 0.0379, "step": 8838 }, { "epoch": 16.308402585410896, "grad_norm": 0.3534673750400543, "learning_rate": 2.6508e-05, "loss": 0.0238, "step": 8839 }, { "epoch": 16.310249307479225, 
"grad_norm": 0.25616320967674255, "learning_rate": 2.6511e-05, "loss": 0.0186, "step": 8840 }, { "epoch": 16.312096029547554, "grad_norm": 0.16772395372390747, "learning_rate": 2.6514e-05, "loss": 0.0112, "step": 8841 }, { "epoch": 16.313942751615883, "grad_norm": 0.30720284581184387, "learning_rate": 2.6517e-05, "loss": 0.0131, "step": 8842 }, { "epoch": 16.31578947368421, "grad_norm": 0.25934576988220215, "learning_rate": 2.652e-05, "loss": 0.0222, "step": 8843 }, { "epoch": 16.317636195752538, "grad_norm": 0.4097827076911926, "learning_rate": 2.6523e-05, "loss": 0.0266, "step": 8844 }, { "epoch": 16.319482917820867, "grad_norm": 0.4845149517059326, "learning_rate": 2.6526e-05, "loss": 0.0156, "step": 8845 }, { "epoch": 16.321329639889196, "grad_norm": 0.2271205484867096, "learning_rate": 2.6529e-05, "loss": 0.0152, "step": 8846 }, { "epoch": 16.323176361957525, "grad_norm": 0.2507508397102356, "learning_rate": 2.6532e-05, "loss": 0.011, "step": 8847 }, { "epoch": 16.325023084025855, "grad_norm": 0.22607016563415527, "learning_rate": 2.6535e-05, "loss": 0.0114, "step": 8848 }, { "epoch": 16.326869806094184, "grad_norm": 0.2688290774822235, "learning_rate": 2.6538000000000002e-05, "loss": 0.0227, "step": 8849 }, { "epoch": 16.328716528162513, "grad_norm": 0.39142295718193054, "learning_rate": 2.6541000000000002e-05, "loss": 0.0137, "step": 8850 }, { "epoch": 16.330563250230842, "grad_norm": 0.20058947801589966, "learning_rate": 2.6544000000000002e-05, "loss": 0.0078, "step": 8851 }, { "epoch": 16.332409972299168, "grad_norm": 0.1827445924282074, "learning_rate": 2.6547000000000002e-05, "loss": 0.0075, "step": 8852 }, { "epoch": 16.334256694367497, "grad_norm": 0.2042992264032364, "learning_rate": 2.655e-05, "loss": 0.0049, "step": 8853 }, { "epoch": 16.336103416435826, "grad_norm": 0.7434996962547302, "learning_rate": 2.6553e-05, "loss": 0.0413, "step": 8854 }, { "epoch": 16.337950138504155, "grad_norm": 0.274636447429657, "learning_rate": 2.6556e-05, "loss": 
0.0115, "step": 8855 }, { "epoch": 16.339796860572484, "grad_norm": 0.31662482023239136, "learning_rate": 2.6559e-05, "loss": 0.0116, "step": 8856 }, { "epoch": 16.341643582640813, "grad_norm": 0.3444797992706299, "learning_rate": 2.6562e-05, "loss": 0.0123, "step": 8857 }, { "epoch": 16.343490304709142, "grad_norm": 0.2661111652851105, "learning_rate": 2.6565e-05, "loss": 0.0088, "step": 8858 }, { "epoch": 16.34533702677747, "grad_norm": 0.6574168801307678, "learning_rate": 2.6568000000000004e-05, "loss": 0.0146, "step": 8859 }, { "epoch": 16.347183748845797, "grad_norm": 0.3464463949203491, "learning_rate": 2.6571000000000004e-05, "loss": 0.0088, "step": 8860 }, { "epoch": 16.349030470914126, "grad_norm": 0.35741615295410156, "learning_rate": 2.6574e-05, "loss": 0.0107, "step": 8861 }, { "epoch": 16.350877192982455, "grad_norm": 0.4415765702724457, "learning_rate": 2.6577e-05, "loss": 0.0141, "step": 8862 }, { "epoch": 16.352723915050785, "grad_norm": 0.23338951170444489, "learning_rate": 2.658e-05, "loss": 0.0076, "step": 8863 }, { "epoch": 16.354570637119114, "grad_norm": 0.3000146448612213, "learning_rate": 2.6583e-05, "loss": 0.0111, "step": 8864 }, { "epoch": 16.356417359187443, "grad_norm": 0.29810795187950134, "learning_rate": 2.6586e-05, "loss": 0.0096, "step": 8865 }, { "epoch": 16.358264081255772, "grad_norm": 0.21178984642028809, "learning_rate": 2.6589e-05, "loss": 0.0066, "step": 8866 }, { "epoch": 16.3601108033241, "grad_norm": 1.2656605243682861, "learning_rate": 2.6592e-05, "loss": 0.0226, "step": 8867 }, { "epoch": 16.361957525392427, "grad_norm": 0.2447841763496399, "learning_rate": 2.6595e-05, "loss": 0.01, "step": 8868 }, { "epoch": 16.363804247460756, "grad_norm": 0.34307822585105896, "learning_rate": 2.6598000000000002e-05, "loss": 0.0142, "step": 8869 }, { "epoch": 16.365650969529085, "grad_norm": 0.7280043363571167, "learning_rate": 2.6601e-05, "loss": 0.0089, "step": 8870 }, { "epoch": 16.367497691597414, "grad_norm": 0.504188060760498, 
"learning_rate": 2.6604e-05, "loss": 0.0217, "step": 8871 }, { "epoch": 16.369344413665743, "grad_norm": 0.6764096617698669, "learning_rate": 2.6607e-05, "loss": 0.0127, "step": 8872 }, { "epoch": 16.371191135734072, "grad_norm": 0.5507748126983643, "learning_rate": 2.661e-05, "loss": 0.1713, "step": 8873 }, { "epoch": 16.3730378578024, "grad_norm": 1.3963656425476074, "learning_rate": 2.6613e-05, "loss": 0.1514, "step": 8874 }, { "epoch": 16.37488457987073, "grad_norm": 0.575821042060852, "learning_rate": 2.6616e-05, "loss": 0.1285, "step": 8875 }, { "epoch": 16.37673130193906, "grad_norm": 0.543881893157959, "learning_rate": 2.6619e-05, "loss": 0.1011, "step": 8876 }, { "epoch": 16.378578024007385, "grad_norm": 0.5799001455307007, "learning_rate": 2.6622e-05, "loss": 0.143, "step": 8877 }, { "epoch": 16.380424746075715, "grad_norm": 0.5811880230903625, "learning_rate": 2.6625e-05, "loss": 0.0985, "step": 8878 }, { "epoch": 16.382271468144044, "grad_norm": 0.6220884919166565, "learning_rate": 2.6628e-05, "loss": 0.0882, "step": 8879 }, { "epoch": 16.384118190212373, "grad_norm": 0.8949848413467407, "learning_rate": 2.6631000000000003e-05, "loss": 0.0702, "step": 8880 }, { "epoch": 16.385964912280702, "grad_norm": 0.5241614580154419, "learning_rate": 2.6634000000000003e-05, "loss": 0.0712, "step": 8881 }, { "epoch": 16.38781163434903, "grad_norm": 0.703456461429596, "learning_rate": 2.6637000000000003e-05, "loss": 0.0976, "step": 8882 }, { "epoch": 16.38965835641736, "grad_norm": 0.3968183398246765, "learning_rate": 2.6640000000000002e-05, "loss": 0.0355, "step": 8883 }, { "epoch": 16.39150507848569, "grad_norm": 0.6287881135940552, "learning_rate": 2.6643000000000002e-05, "loss": 0.0638, "step": 8884 }, { "epoch": 16.393351800554015, "grad_norm": 0.44642600417137146, "learning_rate": 2.6646000000000002e-05, "loss": 0.0287, "step": 8885 }, { "epoch": 16.395198522622344, "grad_norm": 0.7361428737640381, "learning_rate": 2.6649e-05, "loss": 0.0444, "step": 8886 }, { 
"epoch": 16.397045244690673, "grad_norm": 0.3432881832122803, "learning_rate": 2.6651999999999998e-05, "loss": 0.0413, "step": 8887 }, { "epoch": 16.398891966759003, "grad_norm": 0.38371410965919495, "learning_rate": 2.6654999999999998e-05, "loss": 0.0439, "step": 8888 }, { "epoch": 16.40073868882733, "grad_norm": 0.45330309867858887, "learning_rate": 2.6657999999999998e-05, "loss": 0.0178, "step": 8889 }, { "epoch": 16.40258541089566, "grad_norm": 0.33713167905807495, "learning_rate": 2.6661e-05, "loss": 0.0224, "step": 8890 }, { "epoch": 16.40443213296399, "grad_norm": 0.2396007478237152, "learning_rate": 2.6664e-05, "loss": 0.0105, "step": 8891 }, { "epoch": 16.40627885503232, "grad_norm": 0.2867899537086487, "learning_rate": 2.6667e-05, "loss": 0.037, "step": 8892 }, { "epoch": 16.408125577100645, "grad_norm": 0.32196927070617676, "learning_rate": 2.667e-05, "loss": 0.0145, "step": 8893 }, { "epoch": 16.409972299168974, "grad_norm": 0.38088223338127136, "learning_rate": 2.6673e-05, "loss": 0.0302, "step": 8894 }, { "epoch": 16.411819021237303, "grad_norm": 0.22422203421592712, "learning_rate": 2.6676e-05, "loss": 0.0101, "step": 8895 }, { "epoch": 16.413665743305632, "grad_norm": 0.3829784393310547, "learning_rate": 2.6679e-05, "loss": 0.0165, "step": 8896 }, { "epoch": 16.41551246537396, "grad_norm": 0.44792550802230835, "learning_rate": 2.6682e-05, "loss": 0.0166, "step": 8897 }, { "epoch": 16.41735918744229, "grad_norm": 0.5492526292800903, "learning_rate": 2.6685e-05, "loss": 0.0354, "step": 8898 }, { "epoch": 16.41920590951062, "grad_norm": 0.3540531396865845, "learning_rate": 2.6688e-05, "loss": 0.0148, "step": 8899 }, { "epoch": 16.42105263157895, "grad_norm": 0.1499744951725006, "learning_rate": 2.6691000000000002e-05, "loss": 0.0078, "step": 8900 }, { "epoch": 16.422899353647278, "grad_norm": 0.14010149240493774, "learning_rate": 2.6694000000000002e-05, "loss": 0.0083, "step": 8901 }, { "epoch": 16.424746075715603, "grad_norm": 0.6157842874526978, 
"learning_rate": 2.6697000000000002e-05, "loss": 0.0167, "step": 8902 }, { "epoch": 16.426592797783933, "grad_norm": 0.20352981984615326, "learning_rate": 2.6700000000000002e-05, "loss": 0.0094, "step": 8903 }, { "epoch": 16.42843951985226, "grad_norm": 0.27032119035720825, "learning_rate": 2.6703e-05, "loss": 0.0177, "step": 8904 }, { "epoch": 16.43028624192059, "grad_norm": 0.5394765138626099, "learning_rate": 2.6706e-05, "loss": 0.0202, "step": 8905 }, { "epoch": 16.43213296398892, "grad_norm": 0.34230151772499084, "learning_rate": 2.6709e-05, "loss": 0.0122, "step": 8906 }, { "epoch": 16.43397968605725, "grad_norm": 0.8664721250534058, "learning_rate": 2.6712e-05, "loss": 0.0166, "step": 8907 }, { "epoch": 16.43582640812558, "grad_norm": 0.41157254576683044, "learning_rate": 2.6715e-05, "loss": 0.0111, "step": 8908 }, { "epoch": 16.437673130193907, "grad_norm": 0.38635289669036865, "learning_rate": 2.6718e-05, "loss": 0.0133, "step": 8909 }, { "epoch": 16.439519852262233, "grad_norm": 0.47712311148643494, "learning_rate": 2.6721e-05, "loss": 0.0139, "step": 8910 }, { "epoch": 16.441366574330562, "grad_norm": 0.2243277132511139, "learning_rate": 2.6724e-05, "loss": 0.0093, "step": 8911 }, { "epoch": 16.44321329639889, "grad_norm": 0.396609365940094, "learning_rate": 2.6727e-05, "loss": 0.0191, "step": 8912 }, { "epoch": 16.44506001846722, "grad_norm": 0.7725248336791992, "learning_rate": 2.673e-05, "loss": 0.0263, "step": 8913 }, { "epoch": 16.44690674053555, "grad_norm": 0.2902774512767792, "learning_rate": 2.6733e-05, "loss": 0.0075, "step": 8914 }, { "epoch": 16.44875346260388, "grad_norm": 0.31212446093559265, "learning_rate": 2.6736e-05, "loss": 0.0075, "step": 8915 }, { "epoch": 16.450600184672208, "grad_norm": 0.27319321036338806, "learning_rate": 2.6739e-05, "loss": 0.0095, "step": 8916 }, { "epoch": 16.452446906740537, "grad_norm": 0.42757007479667664, "learning_rate": 2.6742e-05, "loss": 0.0152, "step": 8917 }, { "epoch": 16.454293628808863, 
"grad_norm": 0.2784916162490845, "learning_rate": 2.6745e-05, "loss": 0.009, "step": 8918 }, { "epoch": 16.45614035087719, "grad_norm": 0.6861608028411865, "learning_rate": 2.6748e-05, "loss": 0.0113, "step": 8919 }, { "epoch": 16.45798707294552, "grad_norm": 0.4131728410720825, "learning_rate": 2.6751000000000002e-05, "loss": 0.0332, "step": 8920 }, { "epoch": 16.45983379501385, "grad_norm": 0.7632374167442322, "learning_rate": 2.6754e-05, "loss": 0.0233, "step": 8921 }, { "epoch": 16.46168051708218, "grad_norm": 0.908040463924408, "learning_rate": 2.6757e-05, "loss": 0.0177, "step": 8922 }, { "epoch": 16.46352723915051, "grad_norm": 1.1065903902053833, "learning_rate": 2.676e-05, "loss": 0.2018, "step": 8923 }, { "epoch": 16.465373961218837, "grad_norm": 0.8847578167915344, "learning_rate": 2.6763e-05, "loss": 0.2644, "step": 8924 }, { "epoch": 16.467220683287167, "grad_norm": 0.45100724697113037, "learning_rate": 2.6766e-05, "loss": 0.0949, "step": 8925 }, { "epoch": 16.469067405355496, "grad_norm": 0.5411179065704346, "learning_rate": 2.6769e-05, "loss": 0.1164, "step": 8926 }, { "epoch": 16.47091412742382, "grad_norm": 0.4566073417663574, "learning_rate": 2.6772e-05, "loss": 0.0827, "step": 8927 }, { "epoch": 16.47276084949215, "grad_norm": 0.539164662361145, "learning_rate": 2.6775e-05, "loss": 0.0678, "step": 8928 }, { "epoch": 16.47460757156048, "grad_norm": 1.2484465837478638, "learning_rate": 2.6778e-05, "loss": 0.1078, "step": 8929 }, { "epoch": 16.47645429362881, "grad_norm": 0.6017367243766785, "learning_rate": 2.6781000000000003e-05, "loss": 0.0881, "step": 8930 }, { "epoch": 16.478301015697138, "grad_norm": 0.5592476725578308, "learning_rate": 2.6784000000000003e-05, "loss": 0.067, "step": 8931 }, { "epoch": 16.480147737765467, "grad_norm": 0.46658194065093994, "learning_rate": 2.6787000000000003e-05, "loss": 0.0652, "step": 8932 }, { "epoch": 16.481994459833796, "grad_norm": 0.6025909781455994, "learning_rate": 2.6790000000000003e-05, "loss": 
0.0648, "step": 8933 }, { "epoch": 16.483841181902125, "grad_norm": 0.3990728259086609, "learning_rate": 2.6793000000000002e-05, "loss": 0.0394, "step": 8934 }, { "epoch": 16.48568790397045, "grad_norm": 0.46543025970458984, "learning_rate": 2.6796e-05, "loss": 0.0596, "step": 8935 }, { "epoch": 16.48753462603878, "grad_norm": 0.49842751026153564, "learning_rate": 2.6799e-05, "loss": 0.0499, "step": 8936 }, { "epoch": 16.48938134810711, "grad_norm": 0.38525688648223877, "learning_rate": 2.6802e-05, "loss": 0.0232, "step": 8937 }, { "epoch": 16.49122807017544, "grad_norm": 0.2686164081096649, "learning_rate": 2.6805e-05, "loss": 0.0166, "step": 8938 }, { "epoch": 16.493074792243767, "grad_norm": 0.37435072660446167, "learning_rate": 2.6807999999999998e-05, "loss": 0.0144, "step": 8939 }, { "epoch": 16.494921514312097, "grad_norm": 0.37180137634277344, "learning_rate": 2.6811e-05, "loss": 0.02, "step": 8940 }, { "epoch": 16.496768236380426, "grad_norm": 0.36476054787635803, "learning_rate": 2.6814e-05, "loss": 0.0186, "step": 8941 }, { "epoch": 16.498614958448755, "grad_norm": 0.5643298625946045, "learning_rate": 2.6817e-05, "loss": 0.0202, "step": 8942 }, { "epoch": 16.50046168051708, "grad_norm": 0.3396724462509155, "learning_rate": 2.682e-05, "loss": 0.0094, "step": 8943 }, { "epoch": 16.50230840258541, "grad_norm": 0.29790958762168884, "learning_rate": 2.6823e-05, "loss": 0.0142, "step": 8944 }, { "epoch": 16.50415512465374, "grad_norm": 0.29155367612838745, "learning_rate": 2.6826e-05, "loss": 0.0153, "step": 8945 }, { "epoch": 16.506001846722068, "grad_norm": 0.5531447529792786, "learning_rate": 2.6829e-05, "loss": 0.0105, "step": 8946 }, { "epoch": 16.507848568790397, "grad_norm": 0.40035152435302734, "learning_rate": 2.6832e-05, "loss": 0.0206, "step": 8947 }, { "epoch": 16.509695290858726, "grad_norm": 0.3767993152141571, "learning_rate": 2.6835e-05, "loss": 0.0121, "step": 8948 }, { "epoch": 16.511542012927055, "grad_norm": 0.32723501324653625, 
"learning_rate": 2.6838e-05, "loss": 0.0166, "step": 8949 }, { "epoch": 16.513388734995385, "grad_norm": 0.2774938642978668, "learning_rate": 2.6841000000000003e-05, "loss": 0.0453, "step": 8950 }, { "epoch": 16.51523545706371, "grad_norm": 0.6788330674171448, "learning_rate": 2.6844000000000003e-05, "loss": 0.017, "step": 8951 }, { "epoch": 16.51708217913204, "grad_norm": 0.7191709280014038, "learning_rate": 2.6847000000000002e-05, "loss": 0.0142, "step": 8952 }, { "epoch": 16.51892890120037, "grad_norm": 0.2165871262550354, "learning_rate": 2.6850000000000002e-05, "loss": 0.0089, "step": 8953 }, { "epoch": 16.520775623268698, "grad_norm": 0.4514833092689514, "learning_rate": 2.6853000000000002e-05, "loss": 0.0141, "step": 8954 }, { "epoch": 16.522622345337027, "grad_norm": 0.245515376329422, "learning_rate": 2.6856000000000002e-05, "loss": 0.0066, "step": 8955 }, { "epoch": 16.524469067405356, "grad_norm": 0.600486159324646, "learning_rate": 2.6859e-05, "loss": 0.0116, "step": 8956 }, { "epoch": 16.526315789473685, "grad_norm": 0.27086642384529114, "learning_rate": 2.6862e-05, "loss": 0.0099, "step": 8957 }, { "epoch": 16.528162511542014, "grad_norm": 0.4400690197944641, "learning_rate": 2.6865e-05, "loss": 0.0167, "step": 8958 }, { "epoch": 16.530009233610343, "grad_norm": 0.661552369594574, "learning_rate": 2.6867999999999998e-05, "loss": 0.0111, "step": 8959 }, { "epoch": 16.53185595567867, "grad_norm": 0.4757016599178314, "learning_rate": 2.6871e-05, "loss": 0.0387, "step": 8960 }, { "epoch": 16.533702677746998, "grad_norm": 0.5109610557556152, "learning_rate": 2.6874e-05, "loss": 0.0171, "step": 8961 }, { "epoch": 16.535549399815327, "grad_norm": 0.5325073599815369, "learning_rate": 2.6877e-05, "loss": 0.0185, "step": 8962 }, { "epoch": 16.537396121883656, "grad_norm": 0.6453657150268555, "learning_rate": 2.688e-05, "loss": 0.0185, "step": 8963 }, { "epoch": 16.539242843951985, "grad_norm": 0.6359427571296692, "learning_rate": 2.6883e-05, "loss": 0.025, 
"step": 8964 }, { "epoch": 16.541089566020315, "grad_norm": 0.7388697862625122, "learning_rate": 2.6886e-05, "loss": 0.0146, "step": 8965 }, { "epoch": 16.542936288088644, "grad_norm": 0.21786294877529144, "learning_rate": 2.6889e-05, "loss": 0.0075, "step": 8966 }, { "epoch": 16.544783010156973, "grad_norm": 0.5282310843467712, "learning_rate": 2.6892e-05, "loss": 0.0137, "step": 8967 }, { "epoch": 16.5466297322253, "grad_norm": 0.3669258654117584, "learning_rate": 2.6895e-05, "loss": 0.0152, "step": 8968 }, { "epoch": 16.548476454293628, "grad_norm": 0.6105878353118896, "learning_rate": 2.6898e-05, "loss": 0.0108, "step": 8969 }, { "epoch": 16.550323176361957, "grad_norm": 0.23995943367481232, "learning_rate": 2.6901000000000002e-05, "loss": 0.0152, "step": 8970 }, { "epoch": 16.552169898430286, "grad_norm": 0.43827906250953674, "learning_rate": 2.6904000000000002e-05, "loss": 0.0079, "step": 8971 }, { "epoch": 16.554016620498615, "grad_norm": 0.7252397537231445, "learning_rate": 2.6907000000000002e-05, "loss": 0.0357, "step": 8972 }, { "epoch": 16.555863342566944, "grad_norm": 0.651902437210083, "learning_rate": 2.691e-05, "loss": 0.2199, "step": 8973 }, { "epoch": 16.557710064635273, "grad_norm": 0.4844575822353363, "learning_rate": 2.6913e-05, "loss": 0.1116, "step": 8974 }, { "epoch": 16.559556786703602, "grad_norm": 0.6385934352874756, "learning_rate": 2.6916e-05, "loss": 0.1731, "step": 8975 }, { "epoch": 16.56140350877193, "grad_norm": 0.5000412464141846, "learning_rate": 2.6919e-05, "loss": 0.141, "step": 8976 }, { "epoch": 16.563250230840257, "grad_norm": 0.7032834887504578, "learning_rate": 2.6922e-05, "loss": 0.0896, "step": 8977 }, { "epoch": 16.565096952908586, "grad_norm": 0.5850236415863037, "learning_rate": 2.6925e-05, "loss": 0.1414, "step": 8978 }, { "epoch": 16.566943674976915, "grad_norm": 0.44791483879089355, "learning_rate": 2.6928e-05, "loss": 0.0648, "step": 8979 }, { "epoch": 16.568790397045245, "grad_norm": 0.576742947101593, 
"learning_rate": 2.6931000000000004e-05, "loss": 0.082, "step": 8980 }, { "epoch": 16.570637119113574, "grad_norm": 0.6310506463050842, "learning_rate": 2.6934000000000003e-05, "loss": 0.0532, "step": 8981 }, { "epoch": 16.572483841181903, "grad_norm": 0.6527665257453918, "learning_rate": 2.6937000000000003e-05, "loss": 0.0754, "step": 8982 }, { "epoch": 16.574330563250232, "grad_norm": 0.7429980039596558, "learning_rate": 2.6940000000000003e-05, "loss": 0.0793, "step": 8983 }, { "epoch": 16.57617728531856, "grad_norm": 0.7954683303833008, "learning_rate": 2.6943e-05, "loss": 0.1071, "step": 8984 }, { "epoch": 16.578024007386887, "grad_norm": 0.3820796608924866, "learning_rate": 2.6946e-05, "loss": 0.0265, "step": 8985 }, { "epoch": 16.579870729455216, "grad_norm": 0.5825003981590271, "learning_rate": 2.6949e-05, "loss": 0.0859, "step": 8986 }, { "epoch": 16.581717451523545, "grad_norm": 0.45463883876800537, "learning_rate": 2.6952e-05, "loss": 0.0692, "step": 8987 }, { "epoch": 16.583564173591874, "grad_norm": 0.1894882619380951, "learning_rate": 2.6955e-05, "loss": 0.0134, "step": 8988 }, { "epoch": 16.585410895660203, "grad_norm": 0.3609786033630371, "learning_rate": 2.6958e-05, "loss": 0.0163, "step": 8989 }, { "epoch": 16.587257617728532, "grad_norm": 0.7221500277519226, "learning_rate": 2.6961e-05, "loss": 0.0289, "step": 8990 }, { "epoch": 16.58910433979686, "grad_norm": 0.5865622758865356, "learning_rate": 2.6964e-05, "loss": 0.0236, "step": 8991 }, { "epoch": 16.59095106186519, "grad_norm": 0.2695974111557007, "learning_rate": 2.6967e-05, "loss": 0.0117, "step": 8992 }, { "epoch": 16.592797783933516, "grad_norm": 0.30907437205314636, "learning_rate": 2.697e-05, "loss": 0.0111, "step": 8993 }, { "epoch": 16.594644506001845, "grad_norm": 0.3169298768043518, "learning_rate": 2.6973e-05, "loss": 0.0141, "step": 8994 }, { "epoch": 16.596491228070175, "grad_norm": 0.739950954914093, "learning_rate": 2.6976e-05, "loss": 0.0227, "step": 8995 }, { "epoch": 
16.598337950138504, "grad_norm": 0.3569239377975464, "learning_rate": 2.6979e-05, "loss": 0.0109, "step": 8996 }, { "epoch": 16.600184672206833, "grad_norm": 0.5756157636642456, "learning_rate": 2.6982e-05, "loss": 0.0196, "step": 8997 }, { "epoch": 16.602031394275162, "grad_norm": 0.661070704460144, "learning_rate": 2.6985e-05, "loss": 0.0209, "step": 8998 }, { "epoch": 16.60387811634349, "grad_norm": 0.4226855933666229, "learning_rate": 2.6988e-05, "loss": 0.0138, "step": 8999 }, { "epoch": 16.60572483841182, "grad_norm": 0.5164405107498169, "learning_rate": 2.6991000000000003e-05, "loss": 0.0205, "step": 9000 }, { "epoch": 16.60572483841182, "eval_cer": 0.11260595691733416, "eval_loss": 0.3083859980106354, "eval_runtime": 15.9192, "eval_samples_per_second": 19.096, "eval_steps_per_second": 0.628, "eval_wer": 0.39102072141212585, "step": 9000 }, { "epoch": 16.607571560480146, "grad_norm": 0.32492971420288086, "learning_rate": 2.6994000000000003e-05, "loss": 0.0159, "step": 9001 }, { "epoch": 16.609418282548475, "grad_norm": 0.30137714743614197, "learning_rate": 2.6997000000000003e-05, "loss": 0.0065, "step": 9002 }, { "epoch": 16.611265004616804, "grad_norm": 0.6133913993835449, "learning_rate": 2.7000000000000002e-05, "loss": 0.0154, "step": 9003 }, { "epoch": 16.613111726685133, "grad_norm": 0.3343926668167114, "learning_rate": 2.7003000000000002e-05, "loss": 0.0207, "step": 9004 }, { "epoch": 16.614958448753463, "grad_norm": 0.7353482246398926, "learning_rate": 2.7006000000000002e-05, "loss": 0.0181, "step": 9005 }, { "epoch": 16.61680517082179, "grad_norm": 0.6952447891235352, "learning_rate": 2.7009000000000002e-05, "loss": 0.0143, "step": 9006 }, { "epoch": 16.61865189289012, "grad_norm": 0.27672079205513, "learning_rate": 2.7012e-05, "loss": 0.0133, "step": 9007 }, { "epoch": 16.62049861495845, "grad_norm": 0.2262832075357437, "learning_rate": 2.7015e-05, "loss": 0.0108, "step": 9008 }, { "epoch": 16.62234533702678, "grad_norm": 0.7397500276565552, 
"learning_rate": 2.7017999999999998e-05, "loss": 0.0131, "step": 9009 }, { "epoch": 16.624192059095105, "grad_norm": 0.2488490790128708, "learning_rate": 2.7020999999999998e-05, "loss": 0.0147, "step": 9010 }, { "epoch": 16.626038781163434, "grad_norm": 0.7176915407180786, "learning_rate": 2.7024e-05, "loss": 0.0199, "step": 9011 }, { "epoch": 16.627885503231763, "grad_norm": 0.34909358620643616, "learning_rate": 2.7027e-05, "loss": 0.021, "step": 9012 }, { "epoch": 16.629732225300092, "grad_norm": 0.45952746272087097, "learning_rate": 2.703e-05, "loss": 0.0236, "step": 9013 }, { "epoch": 16.63157894736842, "grad_norm": 0.5121981501579285, "learning_rate": 2.7033e-05, "loss": 0.0224, "step": 9014 }, { "epoch": 16.63342566943675, "grad_norm": 0.3115614950656891, "learning_rate": 2.7036e-05, "loss": 0.0103, "step": 9015 }, { "epoch": 16.63527239150508, "grad_norm": 0.40834179520606995, "learning_rate": 2.7039e-05, "loss": 0.0198, "step": 9016 }, { "epoch": 16.63711911357341, "grad_norm": 0.29144415259361267, "learning_rate": 2.7042e-05, "loss": 0.015, "step": 9017 }, { "epoch": 16.638965835641734, "grad_norm": 0.535346269607544, "learning_rate": 2.7045e-05, "loss": 0.0197, "step": 9018 }, { "epoch": 16.640812557710063, "grad_norm": 0.6192907094955444, "learning_rate": 2.7048e-05, "loss": 0.0367, "step": 9019 }, { "epoch": 16.642659279778393, "grad_norm": 0.31909242272377014, "learning_rate": 2.7051e-05, "loss": 0.0191, "step": 9020 }, { "epoch": 16.64450600184672, "grad_norm": 0.43197062611579895, "learning_rate": 2.7054000000000002e-05, "loss": 0.0174, "step": 9021 }, { "epoch": 16.64635272391505, "grad_norm": 0.3951015770435333, "learning_rate": 2.7057000000000002e-05, "loss": 0.0128, "step": 9022 }, { "epoch": 16.64819944598338, "grad_norm": 0.775962233543396, "learning_rate": 2.7060000000000002e-05, "loss": 0.2058, "step": 9023 }, { "epoch": 16.65004616805171, "grad_norm": 0.6837390661239624, "learning_rate": 2.7063e-05, "loss": 0.1974, "step": 9024 }, { "epoch": 
16.65189289012004, "grad_norm": 0.499403715133667, "learning_rate": 2.7066e-05, "loss": 0.1654, "step": 9025 }, { "epoch": 16.653739612188367, "grad_norm": 0.5308801531791687, "learning_rate": 2.7069e-05, "loss": 0.1186, "step": 9026 }, { "epoch": 16.655586334256693, "grad_norm": 0.5072598457336426, "learning_rate": 2.7072e-05, "loss": 0.1018, "step": 9027 }, { "epoch": 16.657433056325022, "grad_norm": 0.5138546824455261, "learning_rate": 2.7075e-05, "loss": 0.1102, "step": 9028 }, { "epoch": 16.65927977839335, "grad_norm": 0.49401623010635376, "learning_rate": 2.7078e-05, "loss": 0.0695, "step": 9029 }, { "epoch": 16.66112650046168, "grad_norm": 0.47299742698669434, "learning_rate": 2.7081e-05, "loss": 0.0837, "step": 9030 }, { "epoch": 16.66297322253001, "grad_norm": 0.39474567770957947, "learning_rate": 2.7084000000000004e-05, "loss": 0.0639, "step": 9031 }, { "epoch": 16.66481994459834, "grad_norm": 0.4763193726539612, "learning_rate": 2.7087000000000003e-05, "loss": 0.1129, "step": 9032 }, { "epoch": 16.666666666666668, "grad_norm": 0.6414193511009216, "learning_rate": 2.709e-05, "loss": 0.047, "step": 9033 }, { "epoch": 16.668513388734997, "grad_norm": 0.4468953609466553, "learning_rate": 2.7093e-05, "loss": 0.0435, "step": 9034 }, { "epoch": 16.670360110803323, "grad_norm": 0.6856594681739807, "learning_rate": 2.7096e-05, "loss": 0.0754, "step": 9035 }, { "epoch": 16.67220683287165, "grad_norm": 0.2718965709209442, "learning_rate": 2.7099e-05, "loss": 0.0332, "step": 9036 }, { "epoch": 16.67405355493998, "grad_norm": 0.28632283210754395, "learning_rate": 2.7102e-05, "loss": 0.0433, "step": 9037 }, { "epoch": 16.67590027700831, "grad_norm": 0.4210139214992523, "learning_rate": 2.7105e-05, "loss": 0.0345, "step": 9038 }, { "epoch": 16.67774699907664, "grad_norm": 0.3410778343677521, "learning_rate": 2.7108e-05, "loss": 0.0247, "step": 9039 }, { "epoch": 16.67959372114497, "grad_norm": 0.4439637362957001, "learning_rate": 2.7111e-05, "loss": 0.0153, "step": 
9040 }, { "epoch": 16.681440443213297, "grad_norm": 0.9153331518173218, "learning_rate": 2.7114e-05, "loss": 0.0107, "step": 9041 }, { "epoch": 16.683287165281627, "grad_norm": 0.3366173505783081, "learning_rate": 2.7117e-05, "loss": 0.0122, "step": 9042 }, { "epoch": 16.685133887349952, "grad_norm": 0.20940084755420685, "learning_rate": 2.712e-05, "loss": 0.0175, "step": 9043 }, { "epoch": 16.68698060941828, "grad_norm": 0.861236572265625, "learning_rate": 2.7123e-05, "loss": 0.0342, "step": 9044 }, { "epoch": 16.68882733148661, "grad_norm": 0.41026780009269714, "learning_rate": 2.7126e-05, "loss": 0.0277, "step": 9045 }, { "epoch": 16.69067405355494, "grad_norm": 0.24577605724334717, "learning_rate": 2.7129e-05, "loss": 0.0119, "step": 9046 }, { "epoch": 16.69252077562327, "grad_norm": 0.33204078674316406, "learning_rate": 2.7132e-05, "loss": 0.0131, "step": 9047 }, { "epoch": 16.694367497691598, "grad_norm": 0.1572006791830063, "learning_rate": 2.7135e-05, "loss": 0.0093, "step": 9048 }, { "epoch": 16.696214219759927, "grad_norm": 0.6033346652984619, "learning_rate": 2.7138e-05, "loss": 0.0179, "step": 9049 }, { "epoch": 16.698060941828256, "grad_norm": 0.31534436345100403, "learning_rate": 2.7141e-05, "loss": 0.011, "step": 9050 }, { "epoch": 16.69990766389658, "grad_norm": 0.29961878061294556, "learning_rate": 2.7144000000000003e-05, "loss": 0.0254, "step": 9051 }, { "epoch": 16.70175438596491, "grad_norm": 0.263561487197876, "learning_rate": 2.7147000000000003e-05, "loss": 0.0121, "step": 9052 }, { "epoch": 16.70360110803324, "grad_norm": 0.28035175800323486, "learning_rate": 2.7150000000000003e-05, "loss": 0.0079, "step": 9053 }, { "epoch": 16.70544783010157, "grad_norm": 0.43491461873054504, "learning_rate": 2.7153000000000002e-05, "loss": 0.0226, "step": 9054 }, { "epoch": 16.7072945521699, "grad_norm": 0.2045207917690277, "learning_rate": 2.7156000000000002e-05, "loss": 0.0108, "step": 9055 }, { "epoch": 16.709141274238227, "grad_norm": 
0.35303208231925964, "learning_rate": 2.7159000000000002e-05, "loss": 0.0138, "step": 9056 }, { "epoch": 16.710987996306557, "grad_norm": 0.24313557147979736, "learning_rate": 2.7162000000000002e-05, "loss": 0.0126, "step": 9057 }, { "epoch": 16.712834718374886, "grad_norm": 0.4155482351779938, "learning_rate": 2.7164999999999998e-05, "loss": 0.0184, "step": 9058 }, { "epoch": 16.714681440443215, "grad_norm": 0.35526689887046814, "learning_rate": 2.7167999999999998e-05, "loss": 0.0127, "step": 9059 }, { "epoch": 16.71652816251154, "grad_norm": 1.286755084991455, "learning_rate": 2.7170999999999998e-05, "loss": 0.0127, "step": 9060 }, { "epoch": 16.71837488457987, "grad_norm": 0.4949331283569336, "learning_rate": 2.7174e-05, "loss": 0.018, "step": 9061 }, { "epoch": 16.7202216066482, "grad_norm": 0.2951291501522064, "learning_rate": 2.7177e-05, "loss": 0.0107, "step": 9062 }, { "epoch": 16.722068328716528, "grad_norm": 0.29090291261672974, "learning_rate": 2.718e-05, "loss": 0.0178, "step": 9063 }, { "epoch": 16.723915050784857, "grad_norm": 0.4155007600784302, "learning_rate": 2.7183e-05, "loss": 0.0122, "step": 9064 }, { "epoch": 16.725761772853186, "grad_norm": 0.5140042901039124, "learning_rate": 2.7186e-05, "loss": 0.012, "step": 9065 }, { "epoch": 16.727608494921515, "grad_norm": 0.6684512495994568, "learning_rate": 2.7189e-05, "loss": 0.0278, "step": 9066 }, { "epoch": 16.729455216989845, "grad_norm": 0.5385333895683289, "learning_rate": 2.7192e-05, "loss": 0.0111, "step": 9067 }, { "epoch": 16.73130193905817, "grad_norm": 0.5201623439788818, "learning_rate": 2.7195e-05, "loss": 0.0127, "step": 9068 }, { "epoch": 16.7331486611265, "grad_norm": 0.353179395198822, "learning_rate": 2.7198e-05, "loss": 0.0092, "step": 9069 }, { "epoch": 16.73499538319483, "grad_norm": 0.3160616159439087, "learning_rate": 2.7201e-05, "loss": 0.0141, "step": 9070 }, { "epoch": 16.736842105263158, "grad_norm": 0.21332009136676788, "learning_rate": 2.7204000000000002e-05, "loss": 
0.0079, "step": 9071 }, { "epoch": 16.738688827331487, "grad_norm": 0.6301090121269226, "learning_rate": 2.7207000000000002e-05, "loss": 0.029, "step": 9072 }, { "epoch": 16.740535549399816, "grad_norm": 0.8239292502403259, "learning_rate": 2.7210000000000002e-05, "loss": 0.2004, "step": 9073 }, { "epoch": 16.742382271468145, "grad_norm": 0.6537140011787415, "learning_rate": 2.7213000000000002e-05, "loss": 0.1969, "step": 9074 }, { "epoch": 16.744228993536474, "grad_norm": 0.6992922425270081, "learning_rate": 2.7216e-05, "loss": 0.1587, "step": 9075 }, { "epoch": 16.746075715604803, "grad_norm": 0.5989820957183838, "learning_rate": 2.7219e-05, "loss": 0.0969, "step": 9076 }, { "epoch": 16.74792243767313, "grad_norm": 0.440569132566452, "learning_rate": 2.7222e-05, "loss": 0.0913, "step": 9077 }, { "epoch": 16.749769159741458, "grad_norm": 0.41203105449676514, "learning_rate": 2.7225e-05, "loss": 0.0934, "step": 9078 }, { "epoch": 16.751615881809787, "grad_norm": 0.4103984534740448, "learning_rate": 2.7228e-05, "loss": 0.1059, "step": 9079 }, { "epoch": 16.753462603878116, "grad_norm": 0.4278160035610199, "learning_rate": 2.7231e-05, "loss": 0.0839, "step": 9080 }, { "epoch": 16.755309325946445, "grad_norm": 0.4500424563884735, "learning_rate": 2.7234000000000004e-05, "loss": 0.0509, "step": 9081 }, { "epoch": 16.757156048014775, "grad_norm": 0.4022199809551239, "learning_rate": 2.7237e-05, "loss": 0.0625, "step": 9082 }, { "epoch": 16.759002770083104, "grad_norm": 0.41468554735183716, "learning_rate": 2.724e-05, "loss": 0.0718, "step": 9083 }, { "epoch": 16.760849492151433, "grad_norm": 0.6440789699554443, "learning_rate": 2.7243e-05, "loss": 0.0631, "step": 9084 }, { "epoch": 16.76269621421976, "grad_norm": 0.6056049466133118, "learning_rate": 2.7246e-05, "loss": 0.0454, "step": 9085 }, { "epoch": 16.764542936288088, "grad_norm": 0.47346749901771545, "learning_rate": 2.7249e-05, "loss": 0.0418, "step": 9086 }, { "epoch": 16.766389658356417, "grad_norm": 
0.3096546232700348, "learning_rate": 2.7252e-05, "loss": 0.0327, "step": 9087 }, { "epoch": 16.768236380424746, "grad_norm": 0.17871594429016113, "learning_rate": 2.7255e-05, "loss": 0.0147, "step": 9088 }, { "epoch": 16.770083102493075, "grad_norm": 0.3867637813091278, "learning_rate": 2.7258e-05, "loss": 0.0307, "step": 9089 }, { "epoch": 16.771929824561404, "grad_norm": 0.46113646030426025, "learning_rate": 2.7261e-05, "loss": 0.0482, "step": 9090 }, { "epoch": 16.773776546629733, "grad_norm": 0.5311370491981506, "learning_rate": 2.7264000000000002e-05, "loss": 0.0398, "step": 9091 }, { "epoch": 16.775623268698062, "grad_norm": 0.3156410753726959, "learning_rate": 2.7267e-05, "loss": 0.079, "step": 9092 }, { "epoch": 16.777469990766388, "grad_norm": 0.38590025901794434, "learning_rate": 2.727e-05, "loss": 0.0261, "step": 9093 }, { "epoch": 16.779316712834717, "grad_norm": 0.24432797729969025, "learning_rate": 2.7273e-05, "loss": 0.0094, "step": 9094 }, { "epoch": 16.781163434903046, "grad_norm": 0.2722245454788208, "learning_rate": 2.7276e-05, "loss": 0.0119, "step": 9095 }, { "epoch": 16.783010156971375, "grad_norm": 0.31428173184394836, "learning_rate": 2.7279e-05, "loss": 0.0155, "step": 9096 }, { "epoch": 16.784856879039705, "grad_norm": 0.41833361983299255, "learning_rate": 2.7282e-05, "loss": 0.026, "step": 9097 }, { "epoch": 16.786703601108034, "grad_norm": 0.21867933869361877, "learning_rate": 2.7285e-05, "loss": 0.0102, "step": 9098 }, { "epoch": 16.788550323176363, "grad_norm": 0.3054670989513397, "learning_rate": 2.7288e-05, "loss": 0.0162, "step": 9099 }, { "epoch": 16.790397045244692, "grad_norm": 0.26028329133987427, "learning_rate": 2.7291e-05, "loss": 0.0175, "step": 9100 }, { "epoch": 16.792243767313018, "grad_norm": 0.26109904050827026, "learning_rate": 2.7294000000000003e-05, "loss": 0.0083, "step": 9101 }, { "epoch": 16.794090489381347, "grad_norm": 0.3616829514503479, "learning_rate": 2.7297000000000003e-05, "loss": 0.0109, "step": 9102 }, { 
"epoch": 16.795937211449676, "grad_norm": 0.3457438349723816, "learning_rate": 2.7300000000000003e-05, "loss": 0.0127, "step": 9103 }, { "epoch": 16.797783933518005, "grad_norm": 0.32503554224967957, "learning_rate": 2.7303000000000003e-05, "loss": 0.0143, "step": 9104 }, { "epoch": 16.799630655586334, "grad_norm": 0.17920361459255219, "learning_rate": 2.7306000000000002e-05, "loss": 0.0073, "step": 9105 }, { "epoch": 16.801477377654663, "grad_norm": 0.0969751626253128, "learning_rate": 2.7309000000000002e-05, "loss": 0.0028, "step": 9106 }, { "epoch": 16.803324099722992, "grad_norm": 0.25758984684944153, "learning_rate": 2.7312e-05, "loss": 0.0101, "step": 9107 }, { "epoch": 16.80517082179132, "grad_norm": 0.33695298433303833, "learning_rate": 2.7315e-05, "loss": 0.0104, "step": 9108 }, { "epoch": 16.80701754385965, "grad_norm": 0.8208706974983215, "learning_rate": 2.7318e-05, "loss": 0.0117, "step": 9109 }, { "epoch": 16.808864265927976, "grad_norm": 0.3066198527812958, "learning_rate": 2.7320999999999998e-05, "loss": 0.0106, "step": 9110 }, { "epoch": 16.810710987996305, "grad_norm": 0.9490204453468323, "learning_rate": 2.7324e-05, "loss": 0.031, "step": 9111 }, { "epoch": 16.812557710064635, "grad_norm": 0.344614177942276, "learning_rate": 2.7327e-05, "loss": 0.0114, "step": 9112 }, { "epoch": 16.814404432132964, "grad_norm": 0.27744945883750916, "learning_rate": 2.733e-05, "loss": 0.0085, "step": 9113 }, { "epoch": 16.816251154201293, "grad_norm": 0.3573910593986511, "learning_rate": 2.7333e-05, "loss": 0.0129, "step": 9114 }, { "epoch": 16.818097876269622, "grad_norm": 0.44293442368507385, "learning_rate": 2.7336e-05, "loss": 0.0201, "step": 9115 }, { "epoch": 16.81994459833795, "grad_norm": 0.5297267436981201, "learning_rate": 2.7339e-05, "loss": 0.0124, "step": 9116 }, { "epoch": 16.82179132040628, "grad_norm": 0.33147096633911133, "learning_rate": 2.7342e-05, "loss": 0.0143, "step": 9117 }, { "epoch": 16.823638042474606, "grad_norm": 0.36776602268218994, 
"learning_rate": 2.7345e-05, "loss": 0.0164, "step": 9118 }, { "epoch": 16.825484764542935, "grad_norm": 0.3640115559101105, "learning_rate": 2.7348e-05, "loss": 0.011, "step": 9119 }, { "epoch": 16.827331486611264, "grad_norm": 0.4329957962036133, "learning_rate": 2.7351e-05, "loss": 0.0157, "step": 9120 }, { "epoch": 16.829178208679593, "grad_norm": 0.9924282431602478, "learning_rate": 2.7354000000000003e-05, "loss": 0.0122, "step": 9121 }, { "epoch": 16.831024930747922, "grad_norm": 0.8403156399726868, "learning_rate": 2.7357000000000003e-05, "loss": 0.0295, "step": 9122 }, { "epoch": 16.83287165281625, "grad_norm": 0.7272826433181763, "learning_rate": 2.7360000000000002e-05, "loss": 0.2257, "step": 9123 }, { "epoch": 16.83471837488458, "grad_norm": 0.512742280960083, "learning_rate": 2.7363000000000002e-05, "loss": 0.1599, "step": 9124 }, { "epoch": 16.83656509695291, "grad_norm": 0.5445821285247803, "learning_rate": 2.7366000000000002e-05, "loss": 0.1199, "step": 9125 }, { "epoch": 16.83841181902124, "grad_norm": 0.57929927110672, "learning_rate": 2.7369000000000002e-05, "loss": 0.1051, "step": 9126 }, { "epoch": 16.840258541089565, "grad_norm": 0.5976483225822449, "learning_rate": 2.7372e-05, "loss": 0.1466, "step": 9127 }, { "epoch": 16.842105263157894, "grad_norm": 0.6029165983200073, "learning_rate": 2.7375e-05, "loss": 0.0952, "step": 9128 }, { "epoch": 16.843951985226223, "grad_norm": 0.47818711400032043, "learning_rate": 2.7378e-05, "loss": 0.1068, "step": 9129 }, { "epoch": 16.845798707294552, "grad_norm": 0.351434588432312, "learning_rate": 2.7381e-05, "loss": 0.0641, "step": 9130 }, { "epoch": 16.84764542936288, "grad_norm": 0.4097804129123688, "learning_rate": 2.7383999999999997e-05, "loss": 0.0618, "step": 9131 }, { "epoch": 16.84949215143121, "grad_norm": 0.41000524163246155, "learning_rate": 2.7387e-05, "loss": 0.0526, "step": 9132 }, { "epoch": 16.85133887349954, "grad_norm": 0.4092511236667633, "learning_rate": 2.739e-05, "loss": 0.0473, 
"step": 9133 }, { "epoch": 16.85318559556787, "grad_norm": 0.5258705019950867, "learning_rate": 2.7393e-05, "loss": 0.0966, "step": 9134 }, { "epoch": 16.855032317636194, "grad_norm": 0.7498922944068909, "learning_rate": 2.7396e-05, "loss": 0.0614, "step": 9135 }, { "epoch": 16.856879039704523, "grad_norm": 0.4262339770793915, "learning_rate": 2.7399e-05, "loss": 0.0303, "step": 9136 }, { "epoch": 16.858725761772853, "grad_norm": 0.45660534501075745, "learning_rate": 2.7402e-05, "loss": 0.034, "step": 9137 }, { "epoch": 16.86057248384118, "grad_norm": 0.34982824325561523, "learning_rate": 2.7405e-05, "loss": 0.0176, "step": 9138 }, { "epoch": 16.86241920590951, "grad_norm": 0.6266517639160156, "learning_rate": 2.7408e-05, "loss": 0.0548, "step": 9139 }, { "epoch": 16.86426592797784, "grad_norm": 0.26492610573768616, "learning_rate": 2.7411e-05, "loss": 0.0214, "step": 9140 }, { "epoch": 16.86611265004617, "grad_norm": 0.3128187954425812, "learning_rate": 2.7414e-05, "loss": 0.0197, "step": 9141 }, { "epoch": 16.8679593721145, "grad_norm": 0.4308634102344513, "learning_rate": 2.7417000000000002e-05, "loss": 0.0205, "step": 9142 }, { "epoch": 16.869806094182824, "grad_norm": 0.319132924079895, "learning_rate": 2.7420000000000002e-05, "loss": 0.0178, "step": 9143 }, { "epoch": 16.871652816251153, "grad_norm": 0.2279515415430069, "learning_rate": 2.7423e-05, "loss": 0.0097, "step": 9144 }, { "epoch": 16.873499538319482, "grad_norm": 0.2076783925294876, "learning_rate": 2.7426e-05, "loss": 0.014, "step": 9145 }, { "epoch": 16.87534626038781, "grad_norm": 0.38339874148368835, "learning_rate": 2.7429e-05, "loss": 0.0168, "step": 9146 }, { "epoch": 16.87719298245614, "grad_norm": 0.476095974445343, "learning_rate": 2.7432e-05, "loss": 0.0216, "step": 9147 }, { "epoch": 16.87903970452447, "grad_norm": 0.25043198466300964, "learning_rate": 2.7435e-05, "loss": 0.0108, "step": 9148 }, { "epoch": 16.8808864265928, "grad_norm": 0.4922471344470978, "learning_rate": 2.7438e-05, 
"loss": 0.0111, "step": 9149 }, { "epoch": 16.882733148661128, "grad_norm": 0.2648068964481354, "learning_rate": 2.7441e-05, "loss": 0.0192, "step": 9150 }, { "epoch": 16.884579870729453, "grad_norm": 0.47534462809562683, "learning_rate": 2.7444e-05, "loss": 0.017, "step": 9151 }, { "epoch": 16.886426592797783, "grad_norm": 0.37938153743743896, "learning_rate": 2.7447000000000003e-05, "loss": 0.0418, "step": 9152 }, { "epoch": 16.88827331486611, "grad_norm": 0.3794781267642975, "learning_rate": 2.7450000000000003e-05, "loss": 0.0161, "step": 9153 }, { "epoch": 16.89012003693444, "grad_norm": 0.21170689165592194, "learning_rate": 2.7453000000000003e-05, "loss": 0.0089, "step": 9154 }, { "epoch": 16.89196675900277, "grad_norm": 0.3793889880180359, "learning_rate": 2.7456000000000003e-05, "loss": 0.0131, "step": 9155 }, { "epoch": 16.8938134810711, "grad_norm": 0.40929901599884033, "learning_rate": 2.7459e-05, "loss": 0.0116, "step": 9156 }, { "epoch": 16.89566020313943, "grad_norm": 0.8098201751708984, "learning_rate": 2.7462e-05, "loss": 0.015, "step": 9157 }, { "epoch": 16.897506925207757, "grad_norm": 0.2994246482849121, "learning_rate": 2.7465e-05, "loss": 0.0117, "step": 9158 }, { "epoch": 16.899353647276087, "grad_norm": 0.8491688370704651, "learning_rate": 2.7468e-05, "loss": 0.0389, "step": 9159 }, { "epoch": 16.901200369344412, "grad_norm": 0.29761558771133423, "learning_rate": 2.7471e-05, "loss": 0.0126, "step": 9160 }, { "epoch": 16.90304709141274, "grad_norm": 0.2771746814250946, "learning_rate": 2.7473999999999998e-05, "loss": 0.0154, "step": 9161 }, { "epoch": 16.90489381348107, "grad_norm": 0.19669193029403687, "learning_rate": 2.7477e-05, "loss": 0.0131, "step": 9162 }, { "epoch": 16.9067405355494, "grad_norm": 0.42978933453559875, "learning_rate": 2.748e-05, "loss": 0.0165, "step": 9163 }, { "epoch": 16.90858725761773, "grad_norm": 0.48822492361068726, "learning_rate": 2.7483e-05, "loss": 0.0127, "step": 9164 }, { "epoch": 16.910433979686058, 
"grad_norm": 0.3552548587322235, "learning_rate": 2.7486e-05, "loss": 0.0119, "step": 9165 }, { "epoch": 16.912280701754387, "grad_norm": 0.48361822962760925, "learning_rate": 2.7489e-05, "loss": 0.0135, "step": 9166 }, { "epoch": 16.914127423822716, "grad_norm": 0.2711721956729889, "learning_rate": 2.7492e-05, "loss": 0.008, "step": 9167 }, { "epoch": 16.91597414589104, "grad_norm": 0.41434890031814575, "learning_rate": 2.7495e-05, "loss": 0.0142, "step": 9168 }, { "epoch": 16.91782086795937, "grad_norm": 0.3995231091976166, "learning_rate": 2.7498e-05, "loss": 0.0163, "step": 9169 }, { "epoch": 16.9196675900277, "grad_norm": 0.6402641534805298, "learning_rate": 2.7501e-05, "loss": 0.0163, "step": 9170 }, { "epoch": 16.92151431209603, "grad_norm": 0.46368321776390076, "learning_rate": 2.7504e-05, "loss": 0.0152, "step": 9171 }, { "epoch": 16.92336103416436, "grad_norm": 0.36163800954818726, "learning_rate": 2.7507000000000003e-05, "loss": 0.0247, "step": 9172 }, { "epoch": 16.925207756232687, "grad_norm": 0.6181609034538269, "learning_rate": 2.7510000000000003e-05, "loss": 0.1281, "step": 9173 }, { "epoch": 16.927054478301017, "grad_norm": 0.7446320056915283, "learning_rate": 2.7513000000000002e-05, "loss": 0.14, "step": 9174 }, { "epoch": 16.928901200369346, "grad_norm": 0.6812530755996704, "learning_rate": 2.7516000000000002e-05, "loss": 0.1675, "step": 9175 }, { "epoch": 16.930747922437675, "grad_norm": 0.6202914714813232, "learning_rate": 2.7519000000000002e-05, "loss": 0.1464, "step": 9176 }, { "epoch": 16.932594644506, "grad_norm": 0.4810855984687805, "learning_rate": 2.7522000000000002e-05, "loss": 0.0624, "step": 9177 }, { "epoch": 16.93444136657433, "grad_norm": 0.6213238835334778, "learning_rate": 2.7525e-05, "loss": 0.0846, "step": 9178 }, { "epoch": 16.93628808864266, "grad_norm": 1.0476669073104858, "learning_rate": 2.7528e-05, "loss": 0.1087, "step": 9179 }, { "epoch": 16.938134810710988, "grad_norm": 0.5438078045845032, "learning_rate": 2.7531e-05, 
"loss": 0.0478, "step": 9180 }, { "epoch": 16.939981532779317, "grad_norm": 0.6464812159538269, "learning_rate": 2.7533999999999998e-05, "loss": 0.0393, "step": 9181 }, { "epoch": 16.941828254847646, "grad_norm": 0.8130463361740112, "learning_rate": 2.7537e-05, "loss": 0.0671, "step": 9182 }, { "epoch": 16.943674976915975, "grad_norm": 0.4148968756198883, "learning_rate": 2.754e-05, "loss": 0.071, "step": 9183 }, { "epoch": 16.945521698984304, "grad_norm": 0.4553473889827728, "learning_rate": 2.7543e-05, "loss": 0.0552, "step": 9184 }, { "epoch": 16.94736842105263, "grad_norm": 0.4178435802459717, "learning_rate": 2.7546e-05, "loss": 0.0193, "step": 9185 }, { "epoch": 16.94921514312096, "grad_norm": 0.4120926856994629, "learning_rate": 2.7549e-05, "loss": 0.0186, "step": 9186 }, { "epoch": 16.95106186518929, "grad_norm": 0.1929618865251541, "learning_rate": 2.7552e-05, "loss": 0.0131, "step": 9187 }, { "epoch": 16.952908587257618, "grad_norm": 0.387055367231369, "learning_rate": 2.7555e-05, "loss": 0.0126, "step": 9188 }, { "epoch": 16.954755309325947, "grad_norm": 0.17407526075839996, "learning_rate": 2.7558e-05, "loss": 0.0131, "step": 9189 }, { "epoch": 16.956602031394276, "grad_norm": 0.24599270522594452, "learning_rate": 2.7561e-05, "loss": 0.0164, "step": 9190 }, { "epoch": 16.958448753462605, "grad_norm": 0.19677557051181793, "learning_rate": 2.7564e-05, "loss": 0.0093, "step": 9191 }, { "epoch": 16.960295475530934, "grad_norm": 0.465720534324646, "learning_rate": 2.7567000000000002e-05, "loss": 0.0147, "step": 9192 }, { "epoch": 16.96214219759926, "grad_norm": 0.8160534501075745, "learning_rate": 2.7570000000000002e-05, "loss": 0.0268, "step": 9193 }, { "epoch": 16.96398891966759, "grad_norm": 0.19757269322872162, "learning_rate": 2.7573000000000002e-05, "loss": 0.008, "step": 9194 }, { "epoch": 16.965835641735918, "grad_norm": 0.3500001132488251, "learning_rate": 2.7576e-05, "loss": 0.0165, "step": 9195 }, { "epoch": 16.967682363804247, "grad_norm": 
0.5525098443031311, "learning_rate": 2.7579e-05, "loss": 0.0607, "step": 9196 }, { "epoch": 16.969529085872576, "grad_norm": 0.6156070828437805, "learning_rate": 2.7582e-05, "loss": 0.0248, "step": 9197 }, { "epoch": 16.971375807940905, "grad_norm": 0.4163903594017029, "learning_rate": 2.7585e-05, "loss": 0.0157, "step": 9198 }, { "epoch": 16.973222530009235, "grad_norm": 0.3857148289680481, "learning_rate": 2.7588e-05, "loss": 0.0099, "step": 9199 }, { "epoch": 16.975069252077564, "grad_norm": 0.27057746052742004, "learning_rate": 2.7591e-05, "loss": 0.0098, "step": 9200 }, { "epoch": 16.97691597414589, "grad_norm": 0.4757899045944214, "learning_rate": 2.7594e-05, "loss": 0.0134, "step": 9201 }, { "epoch": 16.97876269621422, "grad_norm": 0.4091153144836426, "learning_rate": 2.7597000000000004e-05, "loss": 0.0117, "step": 9202 }, { "epoch": 16.980609418282548, "grad_norm": 0.16787925362586975, "learning_rate": 2.7600000000000003e-05, "loss": 0.0082, "step": 9203 }, { "epoch": 16.982456140350877, "grad_norm": 0.2270852029323578, "learning_rate": 2.7603000000000003e-05, "loss": 0.0098, "step": 9204 }, { "epoch": 16.984302862419206, "grad_norm": 0.2602526843547821, "learning_rate": 2.7606e-05, "loss": 0.0173, "step": 9205 }, { "epoch": 16.986149584487535, "grad_norm": 0.3468780219554901, "learning_rate": 2.7609e-05, "loss": 0.0138, "step": 9206 }, { "epoch": 16.987996306555864, "grad_norm": 0.1793440729379654, "learning_rate": 2.7612e-05, "loss": 0.0096, "step": 9207 }, { "epoch": 16.989843028624193, "grad_norm": 0.35675808787345886, "learning_rate": 2.7615e-05, "loss": 0.0139, "step": 9208 }, { "epoch": 16.991689750692522, "grad_norm": 0.7460472583770752, "learning_rate": 2.7618e-05, "loss": 0.0157, "step": 9209 }, { "epoch": 16.993536472760848, "grad_norm": 0.8922962546348572, "learning_rate": 2.7621e-05, "loss": 0.0165, "step": 9210 }, { "epoch": 16.995383194829177, "grad_norm": 0.4702712893486023, "learning_rate": 2.7624e-05, "loss": 0.0157, "step": 9211 }, { 
"epoch": 16.997229916897506, "grad_norm": 0.8927752375602722, "learning_rate": 2.7627e-05, "loss": 0.0223, "step": 9212 }, { "epoch": 16.999076638965835, "grad_norm": 0.5239241123199463, "learning_rate": 2.763e-05, "loss": 0.0123, "step": 9213 }, { "epoch": 17.0, "grad_norm": 1.5342310667037964, "learning_rate": 2.7633e-05, "loss": 0.0427, "step": 9214 }, { "epoch": 17.00184672206833, "grad_norm": 1.8125861883163452, "learning_rate": 2.7636e-05, "loss": 0.2042, "step": 9215 }, { "epoch": 17.00369344413666, "grad_norm": 0.9332644939422607, "learning_rate": 2.7639e-05, "loss": 0.1342, "step": 9216 }, { "epoch": 17.005540166204987, "grad_norm": 1.0487055778503418, "learning_rate": 2.7642e-05, "loss": 0.2083, "step": 9217 }, { "epoch": 17.007386888273317, "grad_norm": 0.5406859517097473, "learning_rate": 2.7645e-05, "loss": 0.1001, "step": 9218 }, { "epoch": 17.009233610341642, "grad_norm": 0.8032557964324951, "learning_rate": 2.7648e-05, "loss": 0.1462, "step": 9219 }, { "epoch": 17.01108033240997, "grad_norm": 0.7021965980529785, "learning_rate": 2.7651e-05, "loss": 0.0841, "step": 9220 }, { "epoch": 17.0129270544783, "grad_norm": 0.5755544900894165, "learning_rate": 2.7654e-05, "loss": 0.0954, "step": 9221 }, { "epoch": 17.01477377654663, "grad_norm": 0.6425787210464478, "learning_rate": 2.7657000000000003e-05, "loss": 0.0837, "step": 9222 }, { "epoch": 17.01662049861496, "grad_norm": 0.41704753041267395, "learning_rate": 2.7660000000000003e-05, "loss": 0.0527, "step": 9223 }, { "epoch": 17.018467220683288, "grad_norm": 0.5317704677581787, "learning_rate": 2.7663000000000003e-05, "loss": 0.0558, "step": 9224 }, { "epoch": 17.020313942751617, "grad_norm": 0.7003104090690613, "learning_rate": 2.7666000000000002e-05, "loss": 0.1217, "step": 9225 }, { "epoch": 17.022160664819946, "grad_norm": 0.38377368450164795, "learning_rate": 2.7669000000000002e-05, "loss": 0.0473, "step": 9226 }, { "epoch": 17.02400738688827, "grad_norm": 0.8063497543334961, "learning_rate": 
2.7672000000000002e-05, "loss": 0.0967, "step": 9227 }, { "epoch": 17.0258541089566, "grad_norm": 0.7322953343391418, "learning_rate": 2.7675000000000002e-05, "loss": 0.1003, "step": 9228 }, { "epoch": 17.02770083102493, "grad_norm": 2.8443334102630615, "learning_rate": 2.7678e-05, "loss": 0.0507, "step": 9229 }, { "epoch": 17.02954755309326, "grad_norm": 0.2949460744857788, "learning_rate": 2.7680999999999998e-05, "loss": 0.0211, "step": 9230 }, { "epoch": 17.03139427516159, "grad_norm": 0.22077718377113342, "learning_rate": 2.7683999999999998e-05, "loss": 0.0123, "step": 9231 }, { "epoch": 17.033240997229917, "grad_norm": 0.21740186214447021, "learning_rate": 2.7687e-05, "loss": 0.0154, "step": 9232 }, { "epoch": 17.035087719298247, "grad_norm": 0.33870649337768555, "learning_rate": 2.769e-05, "loss": 0.0352, "step": 9233 }, { "epoch": 17.036934441366576, "grad_norm": 0.28785428404808044, "learning_rate": 2.7693e-05, "loss": 0.0124, "step": 9234 }, { "epoch": 17.0387811634349, "grad_norm": 0.25172266364097595, "learning_rate": 2.7696e-05, "loss": 0.0099, "step": 9235 }, { "epoch": 17.04062788550323, "grad_norm": 0.29542961716651917, "learning_rate": 2.7699e-05, "loss": 0.0116, "step": 9236 }, { "epoch": 17.04247460757156, "grad_norm": 0.3282698690891266, "learning_rate": 2.7702e-05, "loss": 0.0155, "step": 9237 }, { "epoch": 17.04432132963989, "grad_norm": 0.4257015883922577, "learning_rate": 2.7705e-05, "loss": 0.0243, "step": 9238 }, { "epoch": 17.046168051708218, "grad_norm": 0.22363309562206268, "learning_rate": 2.7708e-05, "loss": 0.0111, "step": 9239 }, { "epoch": 17.048014773776547, "grad_norm": 0.37432435154914856, "learning_rate": 2.7711e-05, "loss": 0.0154, "step": 9240 }, { "epoch": 17.049861495844876, "grad_norm": 0.27944135665893555, "learning_rate": 2.7714e-05, "loss": 0.0076, "step": 9241 }, { "epoch": 17.051708217913205, "grad_norm": 0.3036108911037445, "learning_rate": 2.7717000000000002e-05, "loss": 0.0108, "step": 9242 }, { "epoch": 
17.053554939981534, "grad_norm": 0.34930723905563354, "learning_rate": 2.7720000000000002e-05, "loss": 0.013, "step": 9243 }, { "epoch": 17.05540166204986, "grad_norm": 0.3175663948059082, "learning_rate": 2.7723000000000002e-05, "loss": 0.0098, "step": 9244 }, { "epoch": 17.05724838411819, "grad_norm": 0.36189937591552734, "learning_rate": 2.7726000000000002e-05, "loss": 0.0155, "step": 9245 }, { "epoch": 17.05909510618652, "grad_norm": 0.2590970993041992, "learning_rate": 2.7729e-05, "loss": 0.0073, "step": 9246 }, { "epoch": 17.060941828254848, "grad_norm": 0.16251273453235626, "learning_rate": 2.7732e-05, "loss": 0.0042, "step": 9247 }, { "epoch": 17.062788550323177, "grad_norm": 0.28405195474624634, "learning_rate": 2.7735e-05, "loss": 0.009, "step": 9248 }, { "epoch": 17.064635272391506, "grad_norm": 0.3556298613548279, "learning_rate": 2.7738e-05, "loss": 0.0104, "step": 9249 }, { "epoch": 17.066481994459835, "grad_norm": 0.2897495627403259, "learning_rate": 2.7741e-05, "loss": 0.0141, "step": 9250 }, { "epoch": 17.068328716528164, "grad_norm": 0.5381990075111389, "learning_rate": 2.7744e-05, "loss": 0.0099, "step": 9251 }, { "epoch": 17.07017543859649, "grad_norm": 0.2446378916501999, "learning_rate": 2.7747000000000004e-05, "loss": 0.0105, "step": 9252 }, { "epoch": 17.07202216066482, "grad_norm": 0.4810533821582794, "learning_rate": 2.7750000000000004e-05, "loss": 0.0113, "step": 9253 }, { "epoch": 17.073868882733148, "grad_norm": 0.3621048629283905, "learning_rate": 2.7753e-05, "loss": 0.0106, "step": 9254 }, { "epoch": 17.075715604801477, "grad_norm": 0.2906154990196228, "learning_rate": 2.7756e-05, "loss": 0.0132, "step": 9255 }, { "epoch": 17.077562326869806, "grad_norm": 0.240259051322937, "learning_rate": 2.7759e-05, "loss": 0.006, "step": 9256 }, { "epoch": 17.079409048938135, "grad_norm": 0.35923489928245544, "learning_rate": 2.7762e-05, "loss": 0.0107, "step": 9257 }, { "epoch": 17.081255771006465, "grad_norm": 0.30543577671051025, 
"learning_rate": 2.7765e-05, "loss": 0.0044, "step": 9258 }, { "epoch": 17.083102493074794, "grad_norm": 0.3810657858848572, "learning_rate": 2.7768e-05, "loss": 0.0119, "step": 9259 }, { "epoch": 17.08494921514312, "grad_norm": 0.2955728769302368, "learning_rate": 2.7771e-05, "loss": 0.0096, "step": 9260 }, { "epoch": 17.08679593721145, "grad_norm": 0.9007036089897156, "learning_rate": 2.7774e-05, "loss": 0.0328, "step": 9261 }, { "epoch": 17.088642659279778, "grad_norm": 0.3021596074104309, "learning_rate": 2.7777e-05, "loss": 0.009, "step": 9262 }, { "epoch": 17.090489381348107, "grad_norm": 0.39645281434059143, "learning_rate": 2.778e-05, "loss": 0.0103, "step": 9263 }, { "epoch": 17.092336103416436, "grad_norm": 0.45379069447517395, "learning_rate": 2.7783e-05, "loss": 0.0064, "step": 9264 }, { "epoch": 17.094182825484765, "grad_norm": 0.7573075294494629, "learning_rate": 2.7786e-05, "loss": 0.1849, "step": 9265 }, { "epoch": 17.096029547553094, "grad_norm": 0.6216201186180115, "learning_rate": 2.7789e-05, "loss": 0.1987, "step": 9266 }, { "epoch": 17.097876269621423, "grad_norm": 0.5163629651069641, "learning_rate": 2.7792e-05, "loss": 0.1449, "step": 9267 }, { "epoch": 17.099722991689752, "grad_norm": 0.5870918035507202, "learning_rate": 2.7795e-05, "loss": 0.1147, "step": 9268 }, { "epoch": 17.101569713758078, "grad_norm": 0.5727010369300842, "learning_rate": 2.7798e-05, "loss": 0.1186, "step": 9269 }, { "epoch": 17.103416435826407, "grad_norm": 0.4444744884967804, "learning_rate": 2.7801e-05, "loss": 0.0816, "step": 9270 }, { "epoch": 17.105263157894736, "grad_norm": 0.5740081071853638, "learning_rate": 2.7804e-05, "loss": 0.0998, "step": 9271 }, { "epoch": 17.107109879963065, "grad_norm": 0.3582746684551239, "learning_rate": 2.7807e-05, "loss": 0.0796, "step": 9272 }, { "epoch": 17.108956602031395, "grad_norm": 0.5588990449905396, "learning_rate": 2.7810000000000003e-05, "loss": 0.0924, "step": 9273 }, { "epoch": 17.110803324099724, "grad_norm": 
0.31783145666122437, "learning_rate": 2.7813000000000003e-05, "loss": 0.0601, "step": 9274 }, { "epoch": 17.112650046168053, "grad_norm": 0.553686797618866, "learning_rate": 2.7816000000000003e-05, "loss": 0.0395, "step": 9275 }, { "epoch": 17.114496768236382, "grad_norm": 0.28794318437576294, "learning_rate": 2.7819000000000002e-05, "loss": 0.0467, "step": 9276 }, { "epoch": 17.116343490304708, "grad_norm": 1.769435167312622, "learning_rate": 2.7822000000000002e-05, "loss": 0.0312, "step": 9277 }, { "epoch": 17.118190212373037, "grad_norm": 0.4049617350101471, "learning_rate": 2.7825000000000002e-05, "loss": 0.0172, "step": 9278 }, { "epoch": 17.120036934441366, "grad_norm": 0.41664329171180725, "learning_rate": 2.7828e-05, "loss": 0.0386, "step": 9279 }, { "epoch": 17.121883656509695, "grad_norm": 0.31309565901756287, "learning_rate": 2.7831e-05, "loss": 0.0355, "step": 9280 }, { "epoch": 17.123730378578024, "grad_norm": 0.26691946387290955, "learning_rate": 2.7833999999999998e-05, "loss": 0.0302, "step": 9281 }, { "epoch": 17.125577100646353, "grad_norm": 0.3606624901294708, "learning_rate": 2.7836999999999998e-05, "loss": 0.0172, "step": 9282 }, { "epoch": 17.127423822714682, "grad_norm": 0.29028576612472534, "learning_rate": 2.784e-05, "loss": 0.016, "step": 9283 }, { "epoch": 17.12927054478301, "grad_norm": 0.32634156942367554, "learning_rate": 2.7843e-05, "loss": 0.0746, "step": 9284 }, { "epoch": 17.131117266851337, "grad_norm": 0.297480970621109, "learning_rate": 2.7846e-05, "loss": 0.0164, "step": 9285 }, { "epoch": 17.132963988919666, "grad_norm": 0.3533071279525757, "learning_rate": 2.7849e-05, "loss": 0.0116, "step": 9286 }, { "epoch": 17.134810710987995, "grad_norm": 0.33252015709877014, "learning_rate": 2.7852e-05, "loss": 0.0139, "step": 9287 }, { "epoch": 17.136657433056325, "grad_norm": 0.5829310417175293, "learning_rate": 2.7855e-05, "loss": 0.0167, "step": 9288 }, { "epoch": 17.138504155124654, "grad_norm": 0.5431162118911743, "learning_rate": 
2.7858e-05, "loss": 0.0258, "step": 9289 }, { "epoch": 17.140350877192983, "grad_norm": 0.279328852891922, "learning_rate": 2.7861e-05, "loss": 0.0136, "step": 9290 }, { "epoch": 17.142197599261312, "grad_norm": 0.31380873918533325, "learning_rate": 2.7864e-05, "loss": 0.0082, "step": 9291 }, { "epoch": 17.14404432132964, "grad_norm": 0.4092232286930084, "learning_rate": 2.7867e-05, "loss": 0.0074, "step": 9292 }, { "epoch": 17.14589104339797, "grad_norm": 0.29413503408432007, "learning_rate": 2.7870000000000003e-05, "loss": 0.0088, "step": 9293 }, { "epoch": 17.147737765466296, "grad_norm": 0.23219628632068634, "learning_rate": 2.7873000000000002e-05, "loss": 0.0103, "step": 9294 }, { "epoch": 17.149584487534625, "grad_norm": 1.2620007991790771, "learning_rate": 2.7876000000000002e-05, "loss": 0.0213, "step": 9295 }, { "epoch": 17.151431209602954, "grad_norm": 0.24481666088104248, "learning_rate": 2.7879000000000002e-05, "loss": 0.0053, "step": 9296 }, { "epoch": 17.153277931671283, "grad_norm": 0.30119237303733826, "learning_rate": 2.7882000000000002e-05, "loss": 0.0088, "step": 9297 }, { "epoch": 17.155124653739612, "grad_norm": 0.6697653532028198, "learning_rate": 2.7885e-05, "loss": 0.0179, "step": 9298 }, { "epoch": 17.15697137580794, "grad_norm": 0.3480161726474762, "learning_rate": 2.7888e-05, "loss": 0.0159, "step": 9299 }, { "epoch": 17.15881809787627, "grad_norm": 0.22501026093959808, "learning_rate": 2.7891e-05, "loss": 0.0094, "step": 9300 }, { "epoch": 17.1606648199446, "grad_norm": 0.37073078751564026, "learning_rate": 2.7894e-05, "loss": 0.0166, "step": 9301 }, { "epoch": 17.162511542012926, "grad_norm": 0.34021317958831787, "learning_rate": 2.7897e-05, "loss": 0.0103, "step": 9302 }, { "epoch": 17.164358264081255, "grad_norm": 0.7827439308166504, "learning_rate": 2.79e-05, "loss": 0.0143, "step": 9303 }, { "epoch": 17.166204986149584, "grad_norm": 0.39998817443847656, "learning_rate": 2.7903e-05, "loss": 0.0173, "step": 9304 }, { "epoch": 
17.168051708217913, "grad_norm": 0.3274659812450409, "learning_rate": 2.7906e-05, "loss": 0.0089, "step": 9305 }, { "epoch": 17.169898430286242, "grad_norm": 0.2546210289001465, "learning_rate": 2.7909e-05, "loss": 0.0118, "step": 9306 }, { "epoch": 17.17174515235457, "grad_norm": 0.26053136587142944, "learning_rate": 2.7912e-05, "loss": 0.0078, "step": 9307 }, { "epoch": 17.1735918744229, "grad_norm": 0.4792042374610901, "learning_rate": 2.7915e-05, "loss": 0.0164, "step": 9308 }, { "epoch": 17.17543859649123, "grad_norm": 0.540781557559967, "learning_rate": 2.7918e-05, "loss": 0.0245, "step": 9309 }, { "epoch": 17.177285318559555, "grad_norm": 0.5518727898597717, "learning_rate": 2.7921e-05, "loss": 0.0173, "step": 9310 }, { "epoch": 17.179132040627884, "grad_norm": 0.5841360092163086, "learning_rate": 2.7924e-05, "loss": 0.0098, "step": 9311 }, { "epoch": 17.180978762696213, "grad_norm": 0.9632762670516968, "learning_rate": 2.7927e-05, "loss": 0.0153, "step": 9312 }, { "epoch": 17.182825484764543, "grad_norm": 0.4417622983455658, "learning_rate": 2.7930000000000002e-05, "loss": 0.0097, "step": 9313 }, { "epoch": 17.18467220683287, "grad_norm": 0.6527366042137146, "learning_rate": 2.7933000000000002e-05, "loss": 0.0261, "step": 9314 }, { "epoch": 17.1865189289012, "grad_norm": 0.8529950976371765, "learning_rate": 2.7936e-05, "loss": 0.3008, "step": 9315 }, { "epoch": 17.18836565096953, "grad_norm": 0.5253120064735413, "learning_rate": 2.7939e-05, "loss": 0.1189, "step": 9316 }, { "epoch": 17.19021237303786, "grad_norm": 0.4528539776802063, "learning_rate": 2.7942e-05, "loss": 0.1442, "step": 9317 }, { "epoch": 17.19205909510619, "grad_norm": 0.7505183219909668, "learning_rate": 2.7945e-05, "loss": 0.153, "step": 9318 }, { "epoch": 17.193905817174514, "grad_norm": 0.831365168094635, "learning_rate": 2.7948e-05, "loss": 0.1751, "step": 9319 }, { "epoch": 17.195752539242843, "grad_norm": 0.8180812001228333, "learning_rate": 2.7951e-05, "loss": 0.1076, "step": 9320 
}, { "epoch": 17.197599261311172, "grad_norm": 0.5121163725852966, "learning_rate": 2.7954e-05, "loss": 0.0991, "step": 9321 }, { "epoch": 17.1994459833795, "grad_norm": 0.3973347544670105, "learning_rate": 2.7957e-05, "loss": 0.0564, "step": 9322 }, { "epoch": 17.20129270544783, "grad_norm": 0.5886101126670837, "learning_rate": 2.7960000000000003e-05, "loss": 0.0696, "step": 9323 }, { "epoch": 17.20313942751616, "grad_norm": 0.38349026441574097, "learning_rate": 2.7963000000000003e-05, "loss": 0.0792, "step": 9324 }, { "epoch": 17.20498614958449, "grad_norm": 0.4047221541404724, "learning_rate": 2.7966000000000003e-05, "loss": 0.0519, "step": 9325 }, { "epoch": 17.206832871652818, "grad_norm": 0.3595011532306671, "learning_rate": 2.7969000000000003e-05, "loss": 0.0392, "step": 9326 }, { "epoch": 17.208679593721143, "grad_norm": 0.294626384973526, "learning_rate": 2.7972000000000003e-05, "loss": 0.0484, "step": 9327 }, { "epoch": 17.210526315789473, "grad_norm": 0.3707464933395386, "learning_rate": 2.7975e-05, "loss": 0.0366, "step": 9328 }, { "epoch": 17.2123730378578, "grad_norm": 0.3111836314201355, "learning_rate": 2.7978e-05, "loss": 0.0371, "step": 9329 }, { "epoch": 17.21421975992613, "grad_norm": 0.19050897657871246, "learning_rate": 2.7981e-05, "loss": 0.0243, "step": 9330 }, { "epoch": 17.21606648199446, "grad_norm": 0.2788408100605011, "learning_rate": 2.7984e-05, "loss": 0.0133, "step": 9331 }, { "epoch": 17.21791320406279, "grad_norm": 0.3897608518600464, "learning_rate": 2.7986999999999998e-05, "loss": 0.0149, "step": 9332 }, { "epoch": 17.21975992613112, "grad_norm": 0.24043641984462738, "learning_rate": 2.799e-05, "loss": 0.0417, "step": 9333 }, { "epoch": 17.221606648199447, "grad_norm": 0.3331644833087921, "learning_rate": 2.7993e-05, "loss": 0.0124, "step": 9334 }, { "epoch": 17.223453370267773, "grad_norm": 0.2830711901187897, "learning_rate": 2.7996e-05, "loss": 0.012, "step": 9335 }, { "epoch": 17.225300092336102, "grad_norm": 
0.21424072980880737, "learning_rate": 2.7999e-05, "loss": 0.0065, "step": 9336 }, { "epoch": 17.22714681440443, "grad_norm": 0.6466320753097534, "learning_rate": 2.8002e-05, "loss": 0.0103, "step": 9337 }, { "epoch": 17.22899353647276, "grad_norm": 0.2295599728822708, "learning_rate": 2.8005e-05, "loss": 0.01, "step": 9338 }, { "epoch": 17.23084025854109, "grad_norm": 0.2938486635684967, "learning_rate": 2.8008e-05, "loss": 0.0096, "step": 9339 }, { "epoch": 17.23268698060942, "grad_norm": 0.17863905429840088, "learning_rate": 2.8011e-05, "loss": 0.0069, "step": 9340 }, { "epoch": 17.234533702677748, "grad_norm": 0.1935294270515442, "learning_rate": 2.8014e-05, "loss": 0.0125, "step": 9341 }, { "epoch": 17.236380424746077, "grad_norm": 0.9676811099052429, "learning_rate": 2.8017e-05, "loss": 0.0269, "step": 9342 }, { "epoch": 17.238227146814406, "grad_norm": 0.45683619379997253, "learning_rate": 2.8020000000000003e-05, "loss": 0.0171, "step": 9343 }, { "epoch": 17.24007386888273, "grad_norm": 0.40154099464416504, "learning_rate": 2.8023000000000003e-05, "loss": 0.0114, "step": 9344 }, { "epoch": 17.24192059095106, "grad_norm": 0.4316021203994751, "learning_rate": 2.8026000000000002e-05, "loss": 0.0365, "step": 9345 }, { "epoch": 17.24376731301939, "grad_norm": 0.3077472448348999, "learning_rate": 2.8029000000000002e-05, "loss": 0.0118, "step": 9346 }, { "epoch": 17.24561403508772, "grad_norm": 0.49241816997528076, "learning_rate": 2.8032000000000002e-05, "loss": 0.0107, "step": 9347 }, { "epoch": 17.24746075715605, "grad_norm": 0.3258560299873352, "learning_rate": 2.8035000000000002e-05, "loss": 0.0114, "step": 9348 }, { "epoch": 17.249307479224377, "grad_norm": 0.33918431401252747, "learning_rate": 2.8038e-05, "loss": 0.0141, "step": 9349 }, { "epoch": 17.251154201292707, "grad_norm": 0.29143860936164856, "learning_rate": 2.8041e-05, "loss": 0.0134, "step": 9350 }, { "epoch": 17.253000923361036, "grad_norm": 0.39543747901916504, "learning_rate": 2.8044e-05, 
"loss": 0.0145, "step": 9351 }, { "epoch": 17.25484764542936, "grad_norm": 0.2542322874069214, "learning_rate": 2.8047e-05, "loss": 0.0113, "step": 9352 }, { "epoch": 17.25669436749769, "grad_norm": 0.28918883204460144, "learning_rate": 2.805e-05, "loss": 0.0063, "step": 9353 }, { "epoch": 17.25854108956602, "grad_norm": 0.5130068063735962, "learning_rate": 2.8053e-05, "loss": 0.0175, "step": 9354 }, { "epoch": 17.26038781163435, "grad_norm": 0.6604469418525696, "learning_rate": 2.8056e-05, "loss": 0.0121, "step": 9355 }, { "epoch": 17.262234533702678, "grad_norm": 0.21387112140655518, "learning_rate": 2.8059e-05, "loss": 0.0078, "step": 9356 }, { "epoch": 17.264081255771007, "grad_norm": 0.7465587854385376, "learning_rate": 2.8062e-05, "loss": 0.022, "step": 9357 }, { "epoch": 17.265927977839336, "grad_norm": 0.9279031157493591, "learning_rate": 2.8065e-05, "loss": 0.0261, "step": 9358 }, { "epoch": 17.267774699907665, "grad_norm": 0.23070333898067474, "learning_rate": 2.8068e-05, "loss": 0.0071, "step": 9359 }, { "epoch": 17.26962142197599, "grad_norm": 0.3000924587249756, "learning_rate": 2.8071e-05, "loss": 0.0152, "step": 9360 }, { "epoch": 17.27146814404432, "grad_norm": 0.36132705211639404, "learning_rate": 2.8074e-05, "loss": 0.0099, "step": 9361 }, { "epoch": 17.27331486611265, "grad_norm": 0.3235369920730591, "learning_rate": 2.8077e-05, "loss": 0.0123, "step": 9362 }, { "epoch": 17.27516158818098, "grad_norm": 3.1168806552886963, "learning_rate": 2.8080000000000002e-05, "loss": 0.0307, "step": 9363 }, { "epoch": 17.277008310249307, "grad_norm": 0.44572263956069946, "learning_rate": 2.8083000000000002e-05, "loss": 0.0152, "step": 9364 }, { "epoch": 17.278855032317637, "grad_norm": 1.7113220691680908, "learning_rate": 2.8086000000000002e-05, "loss": 0.1906, "step": 9365 }, { "epoch": 17.280701754385966, "grad_norm": 0.5876774191856384, "learning_rate": 2.8089e-05, "loss": 0.1274, "step": 9366 }, { "epoch": 17.282548476454295, "grad_norm": 
0.8600068688392639, "learning_rate": 2.8092e-05, "loss": 0.1518, "step": 9367 }, { "epoch": 17.284395198522624, "grad_norm": 0.47339197993278503, "learning_rate": 2.8095e-05, "loss": 0.0893, "step": 9368 }, { "epoch": 17.28624192059095, "grad_norm": 0.4132726192474365, "learning_rate": 2.8098e-05, "loss": 0.0815, "step": 9369 }, { "epoch": 17.28808864265928, "grad_norm": 0.4227193295955658, "learning_rate": 2.8101e-05, "loss": 0.0712, "step": 9370 }, { "epoch": 17.289935364727608, "grad_norm": 0.45899859070777893, "learning_rate": 2.8104e-05, "loss": 0.0947, "step": 9371 }, { "epoch": 17.291782086795937, "grad_norm": 0.4160376489162445, "learning_rate": 2.8107e-05, "loss": 0.0745, "step": 9372 }, { "epoch": 17.293628808864266, "grad_norm": 0.3612839877605438, "learning_rate": 2.8110000000000004e-05, "loss": 0.0645, "step": 9373 }, { "epoch": 17.295475530932595, "grad_norm": 0.34148138761520386, "learning_rate": 2.8113000000000003e-05, "loss": 0.0461, "step": 9374 }, { "epoch": 17.297322253000925, "grad_norm": 0.4269258975982666, "learning_rate": 2.8116000000000003e-05, "loss": 0.1095, "step": 9375 }, { "epoch": 17.299168975069254, "grad_norm": 0.3814285695552826, "learning_rate": 2.8119000000000003e-05, "loss": 0.0368, "step": 9376 }, { "epoch": 17.30101569713758, "grad_norm": 0.7113535404205322, "learning_rate": 2.8122e-05, "loss": 0.0425, "step": 9377 }, { "epoch": 17.30286241920591, "grad_norm": 0.40248632431030273, "learning_rate": 2.8125e-05, "loss": 0.0601, "step": 9378 }, { "epoch": 17.304709141274238, "grad_norm": 0.3561994433403015, "learning_rate": 2.8128e-05, "loss": 0.0527, "step": 9379 }, { "epoch": 17.306555863342567, "grad_norm": 0.21789389848709106, "learning_rate": 2.8131e-05, "loss": 0.0346, "step": 9380 }, { "epoch": 17.308402585410896, "grad_norm": 0.4023694396018982, "learning_rate": 2.8134e-05, "loss": 0.0165, "step": 9381 }, { "epoch": 17.310249307479225, "grad_norm": 0.5231220722198486, "learning_rate": 2.8137e-05, "loss": 0.0317, "step": 
9382 }, { "epoch": 17.312096029547554, "grad_norm": 0.3214775025844574, "learning_rate": 2.8139999999999998e-05, "loss": 0.0127, "step": 9383 }, { "epoch": 17.313942751615883, "grad_norm": 0.2625340223312378, "learning_rate": 2.8143e-05, "loss": 0.0223, "step": 9384 }, { "epoch": 17.31578947368421, "grad_norm": 0.41229403018951416, "learning_rate": 2.8146e-05, "loss": 0.0202, "step": 9385 }, { "epoch": 17.317636195752538, "grad_norm": 0.37261727452278137, "learning_rate": 2.8149e-05, "loss": 0.0201, "step": 9386 }, { "epoch": 17.319482917820867, "grad_norm": 0.16855792701244354, "learning_rate": 2.8152e-05, "loss": 0.0102, "step": 9387 }, { "epoch": 17.321329639889196, "grad_norm": 0.3535830080509186, "learning_rate": 2.8155e-05, "loss": 0.0155, "step": 9388 }, { "epoch": 17.323176361957525, "grad_norm": 0.3080908954143524, "learning_rate": 2.8158e-05, "loss": 0.02, "step": 9389 }, { "epoch": 17.325023084025855, "grad_norm": 0.32726743817329407, "learning_rate": 2.8161e-05, "loss": 0.0166, "step": 9390 }, { "epoch": 17.326869806094184, "grad_norm": 1.4310134649276733, "learning_rate": 2.8164e-05, "loss": 0.0222, "step": 9391 }, { "epoch": 17.328716528162513, "grad_norm": 0.4071342349052429, "learning_rate": 2.8167e-05, "loss": 0.0298, "step": 9392 }, { "epoch": 17.330563250230842, "grad_norm": 0.2208002209663391, "learning_rate": 2.817e-05, "loss": 0.0117, "step": 9393 }, { "epoch": 17.332409972299168, "grad_norm": 0.3327980637550354, "learning_rate": 2.8173000000000003e-05, "loss": 0.0147, "step": 9394 }, { "epoch": 17.334256694367497, "grad_norm": 0.7948895692825317, "learning_rate": 2.8176000000000003e-05, "loss": 0.0258, "step": 9395 }, { "epoch": 17.336103416435826, "grad_norm": 0.2921746075153351, "learning_rate": 2.8179000000000002e-05, "loss": 0.0074, "step": 9396 }, { "epoch": 17.337950138504155, "grad_norm": 0.6576547622680664, "learning_rate": 2.8182000000000002e-05, "loss": 0.0144, "step": 9397 }, { "epoch": 17.339796860572484, "grad_norm": 
0.41292092204093933, "learning_rate": 2.8185000000000002e-05, "loss": 0.0158, "step": 9398 }, { "epoch": 17.341643582640813, "grad_norm": 0.2634788453578949, "learning_rate": 2.8188000000000002e-05, "loss": 0.0077, "step": 9399 }, { "epoch": 17.343490304709142, "grad_norm": 0.42348232865333557, "learning_rate": 2.8191e-05, "loss": 0.0092, "step": 9400 }, { "epoch": 17.34533702677747, "grad_norm": 0.7069520354270935, "learning_rate": 2.8194e-05, "loss": 0.0156, "step": 9401 }, { "epoch": 17.347183748845797, "grad_norm": 0.6102639436721802, "learning_rate": 2.8196999999999998e-05, "loss": 0.0219, "step": 9402 }, { "epoch": 17.349030470914126, "grad_norm": 0.5276309251785278, "learning_rate": 2.8199999999999998e-05, "loss": 0.0192, "step": 9403 }, { "epoch": 17.350877192982455, "grad_norm": 0.24854065477848053, "learning_rate": 2.8203e-05, "loss": 0.0119, "step": 9404 }, { "epoch": 17.352723915050785, "grad_norm": 0.7557331919670105, "learning_rate": 2.8206e-05, "loss": 0.0199, "step": 9405 }, { "epoch": 17.354570637119114, "grad_norm": 0.5607580542564392, "learning_rate": 2.8209e-05, "loss": 0.0141, "step": 9406 }, { "epoch": 17.356417359187443, "grad_norm": 0.6083971858024597, "learning_rate": 2.8212e-05, "loss": 0.0179, "step": 9407 }, { "epoch": 17.358264081255772, "grad_norm": 0.517368733882904, "learning_rate": 2.8215e-05, "loss": 0.0138, "step": 9408 }, { "epoch": 17.3601108033241, "grad_norm": 0.20876462757587433, "learning_rate": 2.8218e-05, "loss": 0.0079, "step": 9409 }, { "epoch": 17.361957525392427, "grad_norm": 0.28480789065361023, "learning_rate": 2.8221e-05, "loss": 0.0075, "step": 9410 }, { "epoch": 17.363804247460756, "grad_norm": 0.3195708692073822, "learning_rate": 2.8224e-05, "loss": 0.0121, "step": 9411 }, { "epoch": 17.365650969529085, "grad_norm": 0.6342963576316833, "learning_rate": 2.8227e-05, "loss": 0.0279, "step": 9412 }, { "epoch": 17.367497691597414, "grad_norm": 0.44565221667289734, "learning_rate": 2.823e-05, "loss": 0.0132, "step": 
9413 }, { "epoch": 17.369344413665743, "grad_norm": 1.350311517715454, "learning_rate": 2.8233000000000002e-05, "loss": 0.031, "step": 9414 }, { "epoch": 17.371191135734072, "grad_norm": 0.8127965331077576, "learning_rate": 2.8236000000000002e-05, "loss": 0.2402, "step": 9415 }, { "epoch": 17.3730378578024, "grad_norm": 0.6130790710449219, "learning_rate": 2.8239000000000002e-05, "loss": 0.2571, "step": 9416 }, { "epoch": 17.37488457987073, "grad_norm": 0.7279358506202698, "learning_rate": 2.8242e-05, "loss": 0.1271, "step": 9417 }, { "epoch": 17.37673130193906, "grad_norm": 0.730381429195404, "learning_rate": 2.8245e-05, "loss": 0.112, "step": 9418 }, { "epoch": 17.378578024007385, "grad_norm": 0.5133957266807556, "learning_rate": 2.8248e-05, "loss": 0.092, "step": 9419 }, { "epoch": 17.380424746075715, "grad_norm": 0.6243531107902527, "learning_rate": 2.8251e-05, "loss": 0.1239, "step": 9420 }, { "epoch": 17.382271468144044, "grad_norm": 0.536197304725647, "learning_rate": 2.8254e-05, "loss": 0.0683, "step": 9421 }, { "epoch": 17.384118190212373, "grad_norm": 0.5808553099632263, "learning_rate": 2.8257e-05, "loss": 0.061, "step": 9422 }, { "epoch": 17.385964912280702, "grad_norm": 0.5259320139884949, "learning_rate": 2.826e-05, "loss": 0.087, "step": 9423 }, { "epoch": 17.38781163434903, "grad_norm": 0.3729979991912842, "learning_rate": 2.8263000000000004e-05, "loss": 0.049, "step": 9424 }, { "epoch": 17.38965835641736, "grad_norm": 0.4005459249019623, "learning_rate": 2.8266000000000003e-05, "loss": 0.0563, "step": 9425 }, { "epoch": 17.39150507848569, "grad_norm": 0.8215917348861694, "learning_rate": 2.8269e-05, "loss": 0.0378, "step": 9426 }, { "epoch": 17.393351800554015, "grad_norm": 1.047788381576538, "learning_rate": 2.8272e-05, "loss": 0.1099, "step": 9427 }, { "epoch": 17.395198522622344, "grad_norm": 0.41913044452667236, "learning_rate": 2.8275e-05, "loss": 0.0354, "step": 9428 }, { "epoch": 17.397045244690673, "grad_norm": 0.29535073041915894, 
"learning_rate": 2.8278e-05, "loss": 0.0308, "step": 9429 }, { "epoch": 17.398891966759003, "grad_norm": 0.43361786007881165, "learning_rate": 2.8281e-05, "loss": 0.0184, "step": 9430 }, { "epoch": 17.40073868882733, "grad_norm": 0.2104760706424713, "learning_rate": 2.8284e-05, "loss": 0.0098, "step": 9431 }, { "epoch": 17.40258541089566, "grad_norm": 0.640812337398529, "learning_rate": 2.8287e-05, "loss": 0.0366, "step": 9432 }, { "epoch": 17.40443213296399, "grad_norm": 0.6072062253952026, "learning_rate": 2.829e-05, "loss": 0.0237, "step": 9433 }, { "epoch": 17.40627885503232, "grad_norm": 0.22999133169651031, "learning_rate": 2.8293e-05, "loss": 0.0106, "step": 9434 }, { "epoch": 17.408125577100645, "grad_norm": 0.360470712184906, "learning_rate": 2.8296e-05, "loss": 0.0168, "step": 9435 }, { "epoch": 17.409972299168974, "grad_norm": 0.22122471034526825, "learning_rate": 2.8299e-05, "loss": 0.0184, "step": 9436 }, { "epoch": 17.411819021237303, "grad_norm": 0.23513934016227722, "learning_rate": 2.8302e-05, "loss": 0.0137, "step": 9437 }, { "epoch": 17.413665743305632, "grad_norm": 0.4968354403972626, "learning_rate": 2.8305e-05, "loss": 0.0186, "step": 9438 }, { "epoch": 17.41551246537396, "grad_norm": 0.4751613438129425, "learning_rate": 2.8308e-05, "loss": 0.0163, "step": 9439 }, { "epoch": 17.41735918744229, "grad_norm": 0.3601779043674469, "learning_rate": 2.8311e-05, "loss": 0.0131, "step": 9440 }, { "epoch": 17.41920590951062, "grad_norm": 1.2434005737304688, "learning_rate": 2.8314e-05, "loss": 0.0195, "step": 9441 }, { "epoch": 17.42105263157895, "grad_norm": 0.6260395646095276, "learning_rate": 2.8317e-05, "loss": 0.0097, "step": 9442 }, { "epoch": 17.422899353647278, "grad_norm": 0.3804498612880707, "learning_rate": 2.832e-05, "loss": 0.0171, "step": 9443 }, { "epoch": 17.424746075715603, "grad_norm": 0.6318892240524292, "learning_rate": 2.8323000000000003e-05, "loss": 0.0204, "step": 9444 }, { "epoch": 17.426592797783933, "grad_norm": 
0.48028627038002014, "learning_rate": 2.8326000000000003e-05, "loss": 0.0147, "step": 9445 }, { "epoch": 17.42843951985226, "grad_norm": 0.19748181104660034, "learning_rate": 2.8329000000000003e-05, "loss": 0.0088, "step": 9446 }, { "epoch": 17.43028624192059, "grad_norm": 0.23807014524936676, "learning_rate": 2.8332000000000002e-05, "loss": 0.0116, "step": 9447 }, { "epoch": 17.43213296398892, "grad_norm": 0.4963562786579132, "learning_rate": 2.8335000000000002e-05, "loss": 0.0096, "step": 9448 }, { "epoch": 17.43397968605725, "grad_norm": 0.5319734811782837, "learning_rate": 2.8338000000000002e-05, "loss": 0.0139, "step": 9449 }, { "epoch": 17.43582640812558, "grad_norm": 0.3593072295188904, "learning_rate": 2.8341000000000002e-05, "loss": 0.0092, "step": 9450 }, { "epoch": 17.437673130193907, "grad_norm": 0.40505778789520264, "learning_rate": 2.8344e-05, "loss": 0.0127, "step": 9451 }, { "epoch": 17.439519852262233, "grad_norm": 0.321172297000885, "learning_rate": 2.8346999999999998e-05, "loss": 0.0099, "step": 9452 }, { "epoch": 17.441366574330562, "grad_norm": 0.26844117045402527, "learning_rate": 2.8349999999999998e-05, "loss": 0.0113, "step": 9453 }, { "epoch": 17.44321329639889, "grad_norm": 0.40463778376579285, "learning_rate": 2.8353e-05, "loss": 0.0162, "step": 9454 }, { "epoch": 17.44506001846722, "grad_norm": 0.5917307734489441, "learning_rate": 2.8356e-05, "loss": 0.0275, "step": 9455 }, { "epoch": 17.44690674053555, "grad_norm": 0.5759962201118469, "learning_rate": 2.8359e-05, "loss": 0.0157, "step": 9456 }, { "epoch": 17.44875346260388, "grad_norm": 0.252387672662735, "learning_rate": 2.8362e-05, "loss": 0.0129, "step": 9457 }, { "epoch": 17.450600184672208, "grad_norm": 0.5068285465240479, "learning_rate": 2.8365e-05, "loss": 0.0172, "step": 9458 }, { "epoch": 17.452446906740537, "grad_norm": 0.2979760468006134, "learning_rate": 2.8368e-05, "loss": 0.016, "step": 9459 }, { "epoch": 17.454293628808863, "grad_norm": 0.557605504989624, 
"learning_rate": 2.8371e-05, "loss": 0.0218, "step": 9460 }, { "epoch": 17.45614035087719, "grad_norm": 0.6038251519203186, "learning_rate": 2.8374e-05, "loss": 0.0126, "step": 9461 }, { "epoch": 17.45798707294552, "grad_norm": 1.0946751832962036, "learning_rate": 2.8377e-05, "loss": 0.0425, "step": 9462 }, { "epoch": 17.45983379501385, "grad_norm": 0.39458906650543213, "learning_rate": 2.838e-05, "loss": 0.0178, "step": 9463 }, { "epoch": 17.46168051708218, "grad_norm": 0.388188898563385, "learning_rate": 2.8383000000000003e-05, "loss": 0.0131, "step": 9464 }, { "epoch": 17.46352723915051, "grad_norm": 1.3474907875061035, "learning_rate": 2.8386000000000002e-05, "loss": 0.2015, "step": 9465 }, { "epoch": 17.465373961218837, "grad_norm": 0.6771953105926514, "learning_rate": 2.8389000000000002e-05, "loss": 0.1534, "step": 9466 }, { "epoch": 17.467220683287167, "grad_norm": 0.586620032787323, "learning_rate": 2.8392000000000002e-05, "loss": 0.144, "step": 9467 }, { "epoch": 17.469067405355496, "grad_norm": 0.5955852270126343, "learning_rate": 2.8395000000000002e-05, "loss": 0.1074, "step": 9468 }, { "epoch": 17.47091412742382, "grad_norm": 0.7881993055343628, "learning_rate": 2.8398e-05, "loss": 0.0898, "step": 9469 }, { "epoch": 17.47276084949215, "grad_norm": 0.502046525478363, "learning_rate": 2.8401e-05, "loss": 0.0669, "step": 9470 }, { "epoch": 17.47460757156048, "grad_norm": 0.42128366231918335, "learning_rate": 2.8404e-05, "loss": 0.079, "step": 9471 }, { "epoch": 17.47645429362881, "grad_norm": 0.5567536354064941, "learning_rate": 2.8407e-05, "loss": 0.0523, "step": 9472 }, { "epoch": 17.478301015697138, "grad_norm": 0.5962188839912415, "learning_rate": 2.841e-05, "loss": 0.0686, "step": 9473 }, { "epoch": 17.480147737765467, "grad_norm": 0.5559502840042114, "learning_rate": 2.8413000000000004e-05, "loss": 0.08, "step": 9474 }, { "epoch": 17.481994459833796, "grad_norm": 0.5045327544212341, "learning_rate": 2.8416e-05, "loss": 0.0555, "step": 9475 }, { 
"epoch": 17.483841181902125, "grad_norm": 0.9575005173683167, "learning_rate": 2.8419e-05, "loss": 0.0283, "step": 9476 }, { "epoch": 17.48568790397045, "grad_norm": 0.6482095718383789, "learning_rate": 2.8422e-05, "loss": 0.0452, "step": 9477 }, { "epoch": 17.48753462603878, "grad_norm": 0.3243868350982666, "learning_rate": 2.8425e-05, "loss": 0.0209, "step": 9478 }, { "epoch": 17.48938134810711, "grad_norm": 0.32968437671661377, "learning_rate": 2.8428e-05, "loss": 0.0162, "step": 9479 }, { "epoch": 17.49122807017544, "grad_norm": 0.4083661437034607, "learning_rate": 2.8431e-05, "loss": 0.0213, "step": 9480 }, { "epoch": 17.493074792243767, "grad_norm": 0.3611833453178406, "learning_rate": 2.8434e-05, "loss": 0.0141, "step": 9481 }, { "epoch": 17.494921514312097, "grad_norm": 0.3172003924846649, "learning_rate": 2.8437e-05, "loss": 0.018, "step": 9482 }, { "epoch": 17.496768236380426, "grad_norm": 0.25099191069602966, "learning_rate": 2.844e-05, "loss": 0.0171, "step": 9483 }, { "epoch": 17.498614958448755, "grad_norm": 0.417826384305954, "learning_rate": 2.8443000000000002e-05, "loss": 0.042, "step": 9484 }, { "epoch": 17.50046168051708, "grad_norm": 0.5361323356628418, "learning_rate": 2.8446000000000002e-05, "loss": 0.0366, "step": 9485 }, { "epoch": 17.50230840258541, "grad_norm": 0.2949165999889374, "learning_rate": 2.8449e-05, "loss": 0.0089, "step": 9486 }, { "epoch": 17.50415512465374, "grad_norm": 0.434059739112854, "learning_rate": 2.8452e-05, "loss": 0.0176, "step": 9487 }, { "epoch": 17.506001846722068, "grad_norm": 0.3568911850452423, "learning_rate": 2.8455e-05, "loss": 0.0235, "step": 9488 }, { "epoch": 17.507848568790397, "grad_norm": 0.4586827754974365, "learning_rate": 2.8458e-05, "loss": 0.0091, "step": 9489 }, { "epoch": 17.509695290858726, "grad_norm": 0.38927754759788513, "learning_rate": 2.8461e-05, "loss": 0.03, "step": 9490 }, { "epoch": 17.511542012927055, "grad_norm": 0.27216121554374695, "learning_rate": 2.8464e-05, "loss": 0.0111, 
"step": 9491 }, { "epoch": 17.513388734995385, "grad_norm": 0.3073829710483551, "learning_rate": 2.8467e-05, "loss": 0.0134, "step": 9492 }, { "epoch": 17.51523545706371, "grad_norm": 0.3827543556690216, "learning_rate": 2.847e-05, "loss": 0.0128, "step": 9493 }, { "epoch": 17.51708217913204, "grad_norm": 0.3475456237792969, "learning_rate": 2.8473000000000003e-05, "loss": 0.012, "step": 9494 }, { "epoch": 17.51892890120037, "grad_norm": 0.21921490132808685, "learning_rate": 2.8476000000000003e-05, "loss": 0.0063, "step": 9495 }, { "epoch": 17.520775623268698, "grad_norm": 0.5031759738922119, "learning_rate": 2.8479000000000003e-05, "loss": 0.0162, "step": 9496 }, { "epoch": 17.522622345337027, "grad_norm": 0.3823947310447693, "learning_rate": 2.8482000000000003e-05, "loss": 0.0096, "step": 9497 }, { "epoch": 17.524469067405356, "grad_norm": 0.3628939688205719, "learning_rate": 2.8485000000000003e-05, "loss": 0.0133, "step": 9498 }, { "epoch": 17.526315789473685, "grad_norm": 0.5429162383079529, "learning_rate": 2.8488000000000002e-05, "loss": 0.0157, "step": 9499 }, { "epoch": 17.528162511542014, "grad_norm": 0.27803874015808105, "learning_rate": 2.8491e-05, "loss": 0.0076, "step": 9500 }, { "epoch": 17.530009233610343, "grad_norm": 0.4344504773616791, "learning_rate": 2.8494e-05, "loss": 0.0097, "step": 9501 }, { "epoch": 17.53185595567867, "grad_norm": 0.2885424494743347, "learning_rate": 2.8497e-05, "loss": 0.0138, "step": 9502 }, { "epoch": 17.533702677746998, "grad_norm": 0.6529332399368286, "learning_rate": 2.8499999999999998e-05, "loss": 0.0264, "step": 9503 }, { "epoch": 17.535549399815327, "grad_norm": 1.1576621532440186, "learning_rate": 2.8502999999999998e-05, "loss": 0.014, "step": 9504 }, { "epoch": 17.537396121883656, "grad_norm": 0.26482293009757996, "learning_rate": 2.8506e-05, "loss": 0.0119, "step": 9505 }, { "epoch": 17.539242843951985, "grad_norm": 0.21488991379737854, "learning_rate": 2.8509e-05, "loss": 0.0078, "step": 9506 }, { "epoch": 
17.541089566020315, "grad_norm": 0.6642892360687256, "learning_rate": 2.8512e-05, "loss": 0.0124, "step": 9507 }, { "epoch": 17.542936288088644, "grad_norm": 0.669625461101532, "learning_rate": 2.8515e-05, "loss": 0.0181, "step": 9508 }, { "epoch": 17.544783010156973, "grad_norm": 0.5279157161712646, "learning_rate": 2.8518e-05, "loss": 0.0132, "step": 9509 }, { "epoch": 17.5466297322253, "grad_norm": 0.320911705493927, "learning_rate": 2.8521e-05, "loss": 0.0156, "step": 9510 }, { "epoch": 17.548476454293628, "grad_norm": 0.6547080278396606, "learning_rate": 2.8524e-05, "loss": 0.0176, "step": 9511 }, { "epoch": 17.550323176361957, "grad_norm": 0.2697703242301941, "learning_rate": 2.8527e-05, "loss": 0.0144, "step": 9512 }, { "epoch": 17.552169898430286, "grad_norm": 0.33988258242607117, "learning_rate": 2.853e-05, "loss": 0.0069, "step": 9513 }, { "epoch": 17.554016620498615, "grad_norm": 0.9230872392654419, "learning_rate": 2.8533e-05, "loss": 0.0201, "step": 9514 }, { "epoch": 17.555863342566944, "grad_norm": 0.6196572780609131, "learning_rate": 2.8536000000000003e-05, "loss": 0.1457, "step": 9515 }, { "epoch": 17.557710064635273, "grad_norm": 0.86606365442276, "learning_rate": 2.8539000000000002e-05, "loss": 0.1609, "step": 9516 }, { "epoch": 17.559556786703602, "grad_norm": 0.7880556583404541, "learning_rate": 2.8542000000000002e-05, "loss": 0.1303, "step": 9517 }, { "epoch": 17.56140350877193, "grad_norm": 0.5721180438995361, "learning_rate": 2.8545000000000002e-05, "loss": 0.0967, "step": 9518 }, { "epoch": 17.563250230840257, "grad_norm": 0.5649641752243042, "learning_rate": 2.8548000000000002e-05, "loss": 0.1203, "step": 9519 }, { "epoch": 17.565096952908586, "grad_norm": 0.49527832865715027, "learning_rate": 2.8551e-05, "loss": 0.0973, "step": 9520 }, { "epoch": 17.566943674976915, "grad_norm": 0.4757789373397827, "learning_rate": 2.8554e-05, "loss": 0.0727, "step": 9521 }, { "epoch": 17.568790397045245, "grad_norm": 0.4874994456768036, "learning_rate": 
2.8557e-05, "loss": 0.106, "step": 9522 }, { "epoch": 17.570637119113574, "grad_norm": 0.76370769739151, "learning_rate": 2.856e-05, "loss": 0.0565, "step": 9523 }, { "epoch": 17.572483841181903, "grad_norm": 0.6513908505439758, "learning_rate": 2.8563e-05, "loss": 0.0506, "step": 9524 }, { "epoch": 17.574330563250232, "grad_norm": 0.4472300410270691, "learning_rate": 2.8566e-05, "loss": 0.0472, "step": 9525 }, { "epoch": 17.57617728531856, "grad_norm": 0.5632028579711914, "learning_rate": 2.8569e-05, "loss": 0.0625, "step": 9526 }, { "epoch": 17.578024007386887, "grad_norm": 0.5988933444023132, "learning_rate": 2.8572e-05, "loss": 0.0989, "step": 9527 }, { "epoch": 17.579870729455216, "grad_norm": 0.8819764256477356, "learning_rate": 2.8575e-05, "loss": 0.0573, "step": 9528 }, { "epoch": 17.581717451523545, "grad_norm": 0.9006199836730957, "learning_rate": 2.8578e-05, "loss": 0.038, "step": 9529 }, { "epoch": 17.583564173591874, "grad_norm": 0.674299955368042, "learning_rate": 2.8581e-05, "loss": 0.0131, "step": 9530 }, { "epoch": 17.585410895660203, "grad_norm": 0.38229405879974365, "learning_rate": 2.8584e-05, "loss": 0.02, "step": 9531 }, { "epoch": 17.587257617728532, "grad_norm": 1.4004647731781006, "learning_rate": 2.8587e-05, "loss": 0.0299, "step": 9532 }, { "epoch": 17.58910433979686, "grad_norm": 0.3553142547607422, "learning_rate": 2.859e-05, "loss": 0.0347, "step": 9533 }, { "epoch": 17.59095106186519, "grad_norm": 0.2724413573741913, "learning_rate": 2.8593e-05, "loss": 0.0148, "step": 9534 }, { "epoch": 17.592797783933516, "grad_norm": 0.7119773626327515, "learning_rate": 2.8596000000000002e-05, "loss": 0.0416, "step": 9535 }, { "epoch": 17.594644506001845, "grad_norm": 0.44424229860305786, "learning_rate": 2.8599000000000002e-05, "loss": 0.0197, "step": 9536 }, { "epoch": 17.596491228070175, "grad_norm": 0.2880791425704956, "learning_rate": 2.8602e-05, "loss": 0.009, "step": 9537 }, { "epoch": 17.598337950138504, "grad_norm": 0.3062593936920166, 
"learning_rate": 2.8605e-05, "loss": 0.0145, "step": 9538 }, { "epoch": 17.600184672206833, "grad_norm": 0.31058281660079956, "learning_rate": 2.8608e-05, "loss": 0.0128, "step": 9539 }, { "epoch": 17.602031394275162, "grad_norm": 0.4950855076313019, "learning_rate": 2.8611e-05, "loss": 0.0419, "step": 9540 }, { "epoch": 17.60387811634349, "grad_norm": 0.16567905247211456, "learning_rate": 2.8614e-05, "loss": 0.0425, "step": 9541 }, { "epoch": 17.60572483841182, "grad_norm": 0.3481304943561554, "learning_rate": 2.8617e-05, "loss": 0.0199, "step": 9542 }, { "epoch": 17.607571560480146, "grad_norm": 0.20209775865077972, "learning_rate": 2.862e-05, "loss": 0.0083, "step": 9543 }, { "epoch": 17.609418282548475, "grad_norm": 0.29109519720077515, "learning_rate": 2.8623e-05, "loss": 0.0081, "step": 9544 }, { "epoch": 17.611265004616804, "grad_norm": 0.47210419178009033, "learning_rate": 2.8626000000000003e-05, "loss": 0.0224, "step": 9545 }, { "epoch": 17.613111726685133, "grad_norm": 0.3226254880428314, "learning_rate": 2.8629000000000003e-05, "loss": 0.015, "step": 9546 }, { "epoch": 17.614958448753463, "grad_norm": 0.3622691035270691, "learning_rate": 2.8632000000000003e-05, "loss": 0.0182, "step": 9547 }, { "epoch": 17.61680517082179, "grad_norm": 0.4353501498699188, "learning_rate": 2.8635000000000003e-05, "loss": 0.0168, "step": 9548 }, { "epoch": 17.61865189289012, "grad_norm": 0.2856418192386627, "learning_rate": 2.8638e-05, "loss": 0.0134, "step": 9549 }, { "epoch": 17.62049861495845, "grad_norm": 0.3683633506298065, "learning_rate": 2.8641e-05, "loss": 0.0168, "step": 9550 }, { "epoch": 17.62234533702678, "grad_norm": 0.36736348271369934, "learning_rate": 2.8644e-05, "loss": 0.0121, "step": 9551 }, { "epoch": 17.624192059095105, "grad_norm": 0.5249360203742981, "learning_rate": 2.8647e-05, "loss": 0.0149, "step": 9552 }, { "epoch": 17.626038781163434, "grad_norm": 0.4789264500141144, "learning_rate": 2.865e-05, "loss": 0.0119, "step": 9553 }, { "epoch": 
17.627885503231763, "grad_norm": 0.6176832914352417, "learning_rate": 2.8652999999999998e-05, "loss": 0.0246, "step": 9554 }, { "epoch": 17.629732225300092, "grad_norm": 0.562190592288971, "learning_rate": 2.8656e-05, "loss": 0.0178, "step": 9555 }, { "epoch": 17.63157894736842, "grad_norm": 0.6893006563186646, "learning_rate": 2.8659e-05, "loss": 0.0157, "step": 9556 }, { "epoch": 17.63342566943675, "grad_norm": 0.7275405526161194, "learning_rate": 2.8662e-05, "loss": 0.0269, "step": 9557 }, { "epoch": 17.63527239150508, "grad_norm": 0.494080126285553, "learning_rate": 2.8665e-05, "loss": 0.012, "step": 9558 }, { "epoch": 17.63711911357341, "grad_norm": 0.32813942432403564, "learning_rate": 2.8668e-05, "loss": 0.0185, "step": 9559 }, { "epoch": 17.638965835641734, "grad_norm": 0.3075019121170044, "learning_rate": 2.8671e-05, "loss": 0.0114, "step": 9560 }, { "epoch": 17.640812557710063, "grad_norm": 0.9370800852775574, "learning_rate": 2.8674e-05, "loss": 0.0175, "step": 9561 }, { "epoch": 17.642659279778393, "grad_norm": 0.16680116951465607, "learning_rate": 2.8677e-05, "loss": 0.0043, "step": 9562 }, { "epoch": 17.64450600184672, "grad_norm": 0.4168938994407654, "learning_rate": 2.868e-05, "loss": 0.009, "step": 9563 }, { "epoch": 17.64635272391505, "grad_norm": 0.3269334137439728, "learning_rate": 2.8683e-05, "loss": 0.0097, "step": 9564 }, { "epoch": 17.64819944598338, "grad_norm": 1.061153531074524, "learning_rate": 2.8686000000000003e-05, "loss": 0.2211, "step": 9565 }, { "epoch": 17.65004616805171, "grad_norm": 0.823059618473053, "learning_rate": 2.8689000000000003e-05, "loss": 0.1503, "step": 9566 }, { "epoch": 17.65189289012004, "grad_norm": 0.634117066860199, "learning_rate": 2.8692000000000002e-05, "loss": 0.0934, "step": 9567 }, { "epoch": 17.653739612188367, "grad_norm": 0.6172256469726562, "learning_rate": 2.8695000000000002e-05, "loss": 0.1134, "step": 9568 }, { "epoch": 17.655586334256693, "grad_norm": 0.7339869141578674, "learning_rate": 
2.8698000000000002e-05, "loss": 0.1427, "step": 9569 }, { "epoch": 17.657433056325022, "grad_norm": 0.6254228353500366, "learning_rate": 2.8701000000000002e-05, "loss": 0.1562, "step": 9570 }, { "epoch": 17.65927977839335, "grad_norm": 0.6750670671463013, "learning_rate": 2.8704e-05, "loss": 0.0843, "step": 9571 }, { "epoch": 17.66112650046168, "grad_norm": 0.6463918089866638, "learning_rate": 2.8707e-05, "loss": 0.0835, "step": 9572 }, { "epoch": 17.66297322253001, "grad_norm": 0.5513696074485779, "learning_rate": 2.871e-05, "loss": 0.0705, "step": 9573 }, { "epoch": 17.66481994459834, "grad_norm": 0.8210604190826416, "learning_rate": 2.8712999999999998e-05, "loss": 0.0536, "step": 9574 }, { "epoch": 17.666666666666668, "grad_norm": 0.5390028953552246, "learning_rate": 2.8716e-05, "loss": 0.0537, "step": 9575 }, { "epoch": 17.668513388734997, "grad_norm": 0.3856324255466461, "learning_rate": 2.8719e-05, "loss": 0.058, "step": 9576 }, { "epoch": 17.670360110803323, "grad_norm": 0.5079103708267212, "learning_rate": 2.8722e-05, "loss": 0.0483, "step": 9577 }, { "epoch": 17.67220683287165, "grad_norm": 0.4683472812175751, "learning_rate": 2.8725e-05, "loss": 0.0329, "step": 9578 }, { "epoch": 17.67405355493998, "grad_norm": 0.5738013982772827, "learning_rate": 2.8728e-05, "loss": 0.0681, "step": 9579 }, { "epoch": 17.67590027700831, "grad_norm": 0.5303285717964172, "learning_rate": 2.8731e-05, "loss": 0.039, "step": 9580 }, { "epoch": 17.67774699907664, "grad_norm": 0.2283354550600052, "learning_rate": 2.8734e-05, "loss": 0.0169, "step": 9581 }, { "epoch": 17.67959372114497, "grad_norm": 0.22478586435317993, "learning_rate": 2.8737e-05, "loss": 0.0121, "step": 9582 }, { "epoch": 17.681440443213297, "grad_norm": 0.22185231745243073, "learning_rate": 2.874e-05, "loss": 0.0164, "step": 9583 }, { "epoch": 17.683287165281627, "grad_norm": 0.29085099697113037, "learning_rate": 2.8743e-05, "loss": 0.0185, "step": 9584 }, { "epoch": 17.685133887349952, "grad_norm": 
0.5266684293746948, "learning_rate": 2.8746000000000002e-05, "loss": 0.0191, "step": 9585 }, { "epoch": 17.68698060941828, "grad_norm": 0.20346860587596893, "learning_rate": 2.8749000000000002e-05, "loss": 0.0095, "step": 9586 }, { "epoch": 17.68882733148661, "grad_norm": 0.5799970626831055, "learning_rate": 2.8752000000000002e-05, "loss": 0.029, "step": 9587 }, { "epoch": 17.69067405355494, "grad_norm": 0.29750967025756836, "learning_rate": 2.8755e-05, "loss": 0.0166, "step": 9588 }, { "epoch": 17.69252077562327, "grad_norm": 0.2990019619464874, "learning_rate": 2.8758e-05, "loss": 0.0159, "step": 9589 }, { "epoch": 17.694367497691598, "grad_norm": 0.3774636685848236, "learning_rate": 2.8761e-05, "loss": 0.0129, "step": 9590 }, { "epoch": 17.696214219759927, "grad_norm": 0.2095613330602646, "learning_rate": 2.8764e-05, "loss": 0.0114, "step": 9591 }, { "epoch": 17.698060941828256, "grad_norm": 0.3799412250518799, "learning_rate": 2.8767e-05, "loss": 0.0167, "step": 9592 }, { "epoch": 17.69990766389658, "grad_norm": 0.29384300112724304, "learning_rate": 2.877e-05, "loss": 0.0128, "step": 9593 }, { "epoch": 17.70175438596491, "grad_norm": 0.5691142082214355, "learning_rate": 2.8773e-05, "loss": 0.0683, "step": 9594 }, { "epoch": 17.70360110803324, "grad_norm": 0.3275772035121918, "learning_rate": 2.8776000000000004e-05, "loss": 0.0142, "step": 9595 }, { "epoch": 17.70544783010157, "grad_norm": 0.27224838733673096, "learning_rate": 2.8779000000000003e-05, "loss": 0.0159, "step": 9596 }, { "epoch": 17.7072945521699, "grad_norm": 0.3701270520687103, "learning_rate": 2.8782000000000003e-05, "loss": 0.01, "step": 9597 }, { "epoch": 17.709141274238227, "grad_norm": 0.35283249616622925, "learning_rate": 2.8785e-05, "loss": 0.0122, "step": 9598 }, { "epoch": 17.710987996306557, "grad_norm": 0.28894442319869995, "learning_rate": 2.8788e-05, "loss": 0.0115, "step": 9599 }, { "epoch": 17.712834718374886, "grad_norm": 0.4092564582824707, "learning_rate": 2.8791e-05, "loss": 
0.01, "step": 9600 }, { "epoch": 17.714681440443215, "grad_norm": 0.40917760133743286, "learning_rate": 2.8794e-05, "loss": 0.0087, "step": 9601 }, { "epoch": 17.71652816251154, "grad_norm": 0.26291561126708984, "learning_rate": 2.8797e-05, "loss": 0.0039, "step": 9602 }, { "epoch": 17.71837488457987, "grad_norm": 0.501798152923584, "learning_rate": 2.88e-05, "loss": 0.018, "step": 9603 }, { "epoch": 17.7202216066482, "grad_norm": 0.25262412428855896, "learning_rate": 2.8803e-05, "loss": 0.0089, "step": 9604 }, { "epoch": 17.722068328716528, "grad_norm": 0.5903002023696899, "learning_rate": 2.8806e-05, "loss": 0.0166, "step": 9605 }, { "epoch": 17.723915050784857, "grad_norm": 0.5660081505775452, "learning_rate": 2.8809e-05, "loss": 0.0163, "step": 9606 }, { "epoch": 17.725761772853186, "grad_norm": 0.5993664860725403, "learning_rate": 2.8812e-05, "loss": 0.0173, "step": 9607 }, { "epoch": 17.727608494921515, "grad_norm": 0.45771902799606323, "learning_rate": 2.8815e-05, "loss": 0.0205, "step": 9608 }, { "epoch": 17.729455216989845, "grad_norm": 0.3490065932273865, "learning_rate": 2.8818e-05, "loss": 0.0167, "step": 9609 }, { "epoch": 17.73130193905817, "grad_norm": 0.8391483426094055, "learning_rate": 2.8821e-05, "loss": 0.0231, "step": 9610 }, { "epoch": 17.7331486611265, "grad_norm": 0.3549315929412842, "learning_rate": 2.8824e-05, "loss": 0.0104, "step": 9611 }, { "epoch": 17.73499538319483, "grad_norm": 0.4805821478366852, "learning_rate": 2.8827e-05, "loss": 0.0121, "step": 9612 }, { "epoch": 17.736842105263158, "grad_norm": 0.6108695864677429, "learning_rate": 2.883e-05, "loss": 0.0255, "step": 9613 }, { "epoch": 17.738688827331487, "grad_norm": 0.6562829613685608, "learning_rate": 2.8833e-05, "loss": 0.0245, "step": 9614 }, { "epoch": 17.740535549399816, "grad_norm": 0.6468729376792908, "learning_rate": 2.8836000000000003e-05, "loss": 0.1794, "step": 9615 }, { "epoch": 17.742382271468145, "grad_norm": 0.5797168612480164, "learning_rate": 
2.8839000000000003e-05, "loss": 0.1674, "step": 9616 }, { "epoch": 17.744228993536474, "grad_norm": 0.498214989900589, "learning_rate": 2.8842000000000003e-05, "loss": 0.1372, "step": 9617 }, { "epoch": 17.746075715604803, "grad_norm": 0.42887386679649353, "learning_rate": 2.8845000000000003e-05, "loss": 0.1131, "step": 9618 }, { "epoch": 17.74792243767313, "grad_norm": 0.4448026418685913, "learning_rate": 2.8848000000000002e-05, "loss": 0.0786, "step": 9619 }, { "epoch": 17.749769159741458, "grad_norm": 0.734890341758728, "learning_rate": 2.8851000000000002e-05, "loss": 0.0993, "step": 9620 }, { "epoch": 17.751615881809787, "grad_norm": 0.5033615827560425, "learning_rate": 2.8854000000000002e-05, "loss": 0.0839, "step": 9621 }, { "epoch": 17.753462603878116, "grad_norm": 0.42628228664398193, "learning_rate": 2.8857000000000002e-05, "loss": 0.054, "step": 9622 }, { "epoch": 17.755309325946445, "grad_norm": 0.46170705556869507, "learning_rate": 2.8859999999999998e-05, "loss": 0.0717, "step": 9623 }, { "epoch": 17.757156048014775, "grad_norm": 0.5042704939842224, "learning_rate": 2.8862999999999998e-05, "loss": 0.1043, "step": 9624 }, { "epoch": 17.759002770083104, "grad_norm": 0.5552589893341064, "learning_rate": 2.8866e-05, "loss": 0.0452, "step": 9625 }, { "epoch": 17.760849492151433, "grad_norm": 0.29673290252685547, "learning_rate": 2.8869e-05, "loss": 0.0418, "step": 9626 }, { "epoch": 17.76269621421976, "grad_norm": 0.5385515689849854, "learning_rate": 2.8872e-05, "loss": 0.0462, "step": 9627 }, { "epoch": 17.764542936288088, "grad_norm": 0.32758527994155884, "learning_rate": 2.8875e-05, "loss": 0.0458, "step": 9628 }, { "epoch": 17.766389658356417, "grad_norm": 0.2771989107131958, "learning_rate": 2.8878e-05, "loss": 0.0291, "step": 9629 }, { "epoch": 17.768236380424746, "grad_norm": 0.34781336784362793, "learning_rate": 2.8881e-05, "loss": 0.024, "step": 9630 }, { "epoch": 17.770083102493075, "grad_norm": 0.39459630846977234, "learning_rate": 2.8884e-05, 
"loss": 0.0178, "step": 9631 }, { "epoch": 17.771929824561404, "grad_norm": 0.34889426827430725, "learning_rate": 2.8887e-05, "loss": 0.027, "step": 9632 }, { "epoch": 17.773776546629733, "grad_norm": 0.21631485223770142, "learning_rate": 2.889e-05, "loss": 0.0182, "step": 9633 }, { "epoch": 17.775623268698062, "grad_norm": 0.24801243841648102, "learning_rate": 2.8893e-05, "loss": 0.034, "step": 9634 }, { "epoch": 17.777469990766388, "grad_norm": 0.270134836435318, "learning_rate": 2.8896e-05, "loss": 0.0091, "step": 9635 }, { "epoch": 17.779316712834717, "grad_norm": 0.594982922077179, "learning_rate": 2.8899000000000002e-05, "loss": 0.0151, "step": 9636 }, { "epoch": 17.781163434903046, "grad_norm": 0.6876816749572754, "learning_rate": 2.8902000000000002e-05, "loss": 0.0184, "step": 9637 }, { "epoch": 17.783010156971375, "grad_norm": 0.3381214439868927, "learning_rate": 2.8905000000000002e-05, "loss": 0.0071, "step": 9638 }, { "epoch": 17.784856879039705, "grad_norm": 0.4301603436470032, "learning_rate": 2.8908000000000002e-05, "loss": 0.0143, "step": 9639 }, { "epoch": 17.786703601108034, "grad_norm": 0.40740901231765747, "learning_rate": 2.8911e-05, "loss": 0.0147, "step": 9640 }, { "epoch": 17.788550323176363, "grad_norm": 0.5560108423233032, "learning_rate": 2.8914e-05, "loss": 0.0196, "step": 9641 }, { "epoch": 17.790397045244692, "grad_norm": 0.18544799089431763, "learning_rate": 2.8917e-05, "loss": 0.0083, "step": 9642 }, { "epoch": 17.792243767313018, "grad_norm": 0.36729657649993896, "learning_rate": 2.892e-05, "loss": 0.0136, "step": 9643 }, { "epoch": 17.794090489381347, "grad_norm": 0.24889634549617767, "learning_rate": 2.8923e-05, "loss": 0.0356, "step": 9644 }, { "epoch": 17.795937211449676, "grad_norm": 0.26268625259399414, "learning_rate": 2.8926e-05, "loss": 0.0085, "step": 9645 }, { "epoch": 17.797783933518005, "grad_norm": 0.1501934677362442, "learning_rate": 2.8929000000000004e-05, "loss": 0.0075, "step": 9646 }, { "epoch": 17.799630655586334, 
"grad_norm": 0.3604651391506195, "learning_rate": 2.8932e-05, "loss": 0.0234, "step": 9647 }, { "epoch": 17.801477377654663, "grad_norm": 0.25846487283706665, "learning_rate": 2.8935e-05, "loss": 0.0105, "step": 9648 }, { "epoch": 17.803324099722992, "grad_norm": 0.29089727997779846, "learning_rate": 2.8938e-05, "loss": 0.0067, "step": 9649 }, { "epoch": 17.80517082179132, "grad_norm": 0.32083141803741455, "learning_rate": 2.8941e-05, "loss": 0.0068, "step": 9650 }, { "epoch": 17.80701754385965, "grad_norm": 0.5071612000465393, "learning_rate": 2.8944e-05, "loss": 0.0216, "step": 9651 }, { "epoch": 17.808864265927976, "grad_norm": 0.4766952097415924, "learning_rate": 2.8947e-05, "loss": 0.0446, "step": 9652 }, { "epoch": 17.810710987996305, "grad_norm": 0.32992035150527954, "learning_rate": 2.895e-05, "loss": 0.0128, "step": 9653 }, { "epoch": 17.812557710064635, "grad_norm": 0.2831116020679474, "learning_rate": 2.8953e-05, "loss": 0.0113, "step": 9654 }, { "epoch": 17.814404432132964, "grad_norm": 0.45260798931121826, "learning_rate": 2.8956e-05, "loss": 0.0155, "step": 9655 }, { "epoch": 17.816251154201293, "grad_norm": 0.46074360609054565, "learning_rate": 2.8959000000000002e-05, "loss": 0.0217, "step": 9656 }, { "epoch": 17.818097876269622, "grad_norm": 1.0912634134292603, "learning_rate": 2.8962e-05, "loss": 0.015, "step": 9657 }, { "epoch": 17.81994459833795, "grad_norm": 0.34561482071876526, "learning_rate": 2.8965e-05, "loss": 0.0103, "step": 9658 }, { "epoch": 17.82179132040628, "grad_norm": 0.4716539978981018, "learning_rate": 2.8968e-05, "loss": 0.0138, "step": 9659 }, { "epoch": 17.823638042474606, "grad_norm": 0.5009711980819702, "learning_rate": 2.8971e-05, "loss": 0.0166, "step": 9660 }, { "epoch": 17.825484764542935, "grad_norm": 0.3603162169456482, "learning_rate": 2.8974e-05, "loss": 0.0167, "step": 9661 }, { "epoch": 17.827331486611264, "grad_norm": 0.4746507406234741, "learning_rate": 2.8977e-05, "loss": 0.0149, "step": 9662 }, { "epoch": 
17.829178208679593, "grad_norm": 0.6108738780021667, "learning_rate": 2.898e-05, "loss": 0.0243, "step": 9663 }, { "epoch": 17.831024930747922, "grad_norm": 0.5035948753356934, "learning_rate": 2.8983e-05, "loss": 0.0122, "step": 9664 }, { "epoch": 17.83287165281625, "grad_norm": 0.535803496837616, "learning_rate": 2.8986e-05, "loss": 0.1685, "step": 9665 }, { "epoch": 17.83471837488458, "grad_norm": 0.7792430520057678, "learning_rate": 2.8989000000000003e-05, "loss": 0.1759, "step": 9666 }, { "epoch": 17.83656509695291, "grad_norm": 0.44851070642471313, "learning_rate": 2.8992000000000003e-05, "loss": 0.1387, "step": 9667 }, { "epoch": 17.83841181902124, "grad_norm": 0.531370222568512, "learning_rate": 2.8995000000000003e-05, "loss": 0.1089, "step": 9668 }, { "epoch": 17.840258541089565, "grad_norm": 0.4909587800502777, "learning_rate": 2.8998000000000003e-05, "loss": 0.1063, "step": 9669 }, { "epoch": 17.842105263157894, "grad_norm": 0.4452713429927826, "learning_rate": 2.9001000000000002e-05, "loss": 0.088, "step": 9670 }, { "epoch": 17.843951985226223, "grad_norm": 1.0999737977981567, "learning_rate": 2.9004000000000002e-05, "loss": 0.1129, "step": 9671 }, { "epoch": 17.845798707294552, "grad_norm": 0.7121695280075073, "learning_rate": 2.9007e-05, "loss": 0.1001, "step": 9672 }, { "epoch": 17.84764542936288, "grad_norm": 0.39976316690444946, "learning_rate": 2.901e-05, "loss": 0.0658, "step": 9673 }, { "epoch": 17.84949215143121, "grad_norm": 0.4128870964050293, "learning_rate": 2.9012999999999998e-05, "loss": 0.0482, "step": 9674 }, { "epoch": 17.85133887349954, "grad_norm": 0.9304009079933167, "learning_rate": 2.9015999999999998e-05, "loss": 0.0511, "step": 9675 }, { "epoch": 17.85318559556787, "grad_norm": 1.323746919631958, "learning_rate": 2.9019e-05, "loss": 0.0375, "step": 9676 }, { "epoch": 17.855032317636194, "grad_norm": 0.467035174369812, "learning_rate": 2.9022e-05, "loss": 0.039, "step": 9677 }, { "epoch": 17.856879039704523, "grad_norm": 
0.37897157669067383, "learning_rate": 2.9025e-05, "loss": 0.0258, "step": 9678 }, { "epoch": 17.858725761772853, "grad_norm": 0.4991145730018616, "learning_rate": 2.9028e-05, "loss": 0.033, "step": 9679 }, { "epoch": 17.86057248384118, "grad_norm": 0.44403761625289917, "learning_rate": 2.9031e-05, "loss": 0.0419, "step": 9680 }, { "epoch": 17.86241920590951, "grad_norm": 0.37267547845840454, "learning_rate": 2.9034e-05, "loss": 0.0222, "step": 9681 }, { "epoch": 17.86426592797784, "grad_norm": 0.31096506118774414, "learning_rate": 2.9037e-05, "loss": 0.0163, "step": 9682 }, { "epoch": 17.86611265004617, "grad_norm": 0.3130188584327698, "learning_rate": 2.904e-05, "loss": 0.0174, "step": 9683 }, { "epoch": 17.8679593721145, "grad_norm": 0.22604769468307495, "learning_rate": 2.9043e-05, "loss": 0.0079, "step": 9684 }, { "epoch": 17.869806094182824, "grad_norm": 0.35768184065818787, "learning_rate": 2.9046e-05, "loss": 0.0237, "step": 9685 }, { "epoch": 17.871652816251153, "grad_norm": 0.2845885753631592, "learning_rate": 2.9049000000000003e-05, "loss": 0.0148, "step": 9686 }, { "epoch": 17.873499538319482, "grad_norm": 0.5197261571884155, "learning_rate": 2.9052000000000002e-05, "loss": 0.0163, "step": 9687 }, { "epoch": 17.87534626038781, "grad_norm": 0.3724684715270996, "learning_rate": 2.9055000000000002e-05, "loss": 0.0166, "step": 9688 }, { "epoch": 17.87719298245614, "grad_norm": 0.3093060851097107, "learning_rate": 2.9058000000000002e-05, "loss": 0.0115, "step": 9689 }, { "epoch": 17.87903970452447, "grad_norm": 0.5066264867782593, "learning_rate": 2.9061000000000002e-05, "loss": 0.023, "step": 9690 }, { "epoch": 17.8808864265928, "grad_norm": 0.32835444808006287, "learning_rate": 2.9064e-05, "loss": 0.0149, "step": 9691 }, { "epoch": 17.882733148661128, "grad_norm": 0.23925748467445374, "learning_rate": 2.9067e-05, "loss": 0.008, "step": 9692 }, { "epoch": 17.884579870729453, "grad_norm": 0.21648789942264557, "learning_rate": 2.907e-05, "loss": 0.0075, 
"step": 9693 }, { "epoch": 17.886426592797783, "grad_norm": 0.3470613360404968, "learning_rate": 2.9073e-05, "loss": 0.015, "step": 9694 }, { "epoch": 17.88827331486611, "grad_norm": 0.2070864886045456, "learning_rate": 2.9076e-05, "loss": 0.0048, "step": 9695 }, { "epoch": 17.89012003693444, "grad_norm": 0.48686373233795166, "learning_rate": 2.9079e-05, "loss": 0.0105, "step": 9696 }, { "epoch": 17.89196675900277, "grad_norm": 0.29262134432792664, "learning_rate": 2.9082e-05, "loss": 0.0092, "step": 9697 }, { "epoch": 17.8938134810711, "grad_norm": 0.3679444193840027, "learning_rate": 2.9085e-05, "loss": 0.0098, "step": 9698 }, { "epoch": 17.89566020313943, "grad_norm": 0.224870502948761, "learning_rate": 2.9088e-05, "loss": 0.0052, "step": 9699 }, { "epoch": 17.897506925207757, "grad_norm": 0.35356229543685913, "learning_rate": 2.9091e-05, "loss": 0.0115, "step": 9700 }, { "epoch": 17.899353647276087, "grad_norm": 0.3993721902370453, "learning_rate": 2.9094e-05, "loss": 0.0201, "step": 9701 }, { "epoch": 17.901200369344412, "grad_norm": 0.2684133052825928, "learning_rate": 2.9097e-05, "loss": 0.0096, "step": 9702 }, { "epoch": 17.90304709141274, "grad_norm": 0.3427678346633911, "learning_rate": 2.91e-05, "loss": 0.0139, "step": 9703 }, { "epoch": 17.90489381348107, "grad_norm": 0.36482563614845276, "learning_rate": 2.9103e-05, "loss": 0.0096, "step": 9704 }, { "epoch": 17.9067405355494, "grad_norm": 0.40752437710762024, "learning_rate": 2.9106e-05, "loss": 0.014, "step": 9705 }, { "epoch": 17.90858725761773, "grad_norm": 1.0441960096359253, "learning_rate": 2.9109000000000002e-05, "loss": 0.0208, "step": 9706 }, { "epoch": 17.910433979686058, "grad_norm": 0.2949042320251465, "learning_rate": 2.9112000000000002e-05, "loss": 0.0083, "step": 9707 }, { "epoch": 17.912280701754387, "grad_norm": 0.2150924652814865, "learning_rate": 2.9115e-05, "loss": 0.0062, "step": 9708 }, { "epoch": 17.914127423822716, "grad_norm": 0.8675482869148254, "learning_rate": 2.9118e-05, 
"loss": 0.018, "step": 9709 }, { "epoch": 17.91597414589104, "grad_norm": 0.7008863091468811, "learning_rate": 2.9121e-05, "loss": 0.0155, "step": 9710 }, { "epoch": 17.91782086795937, "grad_norm": 0.4291934370994568, "learning_rate": 2.9124e-05, "loss": 0.0142, "step": 9711 }, { "epoch": 17.9196675900277, "grad_norm": 0.8048538565635681, "learning_rate": 2.9127e-05, "loss": 0.0225, "step": 9712 }, { "epoch": 17.92151431209603, "grad_norm": 0.35469120740890503, "learning_rate": 2.913e-05, "loss": 0.0074, "step": 9713 }, { "epoch": 17.92336103416436, "grad_norm": 0.8907756209373474, "learning_rate": 2.9133e-05, "loss": 0.0212, "step": 9714 }, { "epoch": 17.925207756232687, "grad_norm": 1.0798656940460205, "learning_rate": 2.9136e-05, "loss": 0.1914, "step": 9715 }, { "epoch": 17.927054478301017, "grad_norm": 0.8650581240653992, "learning_rate": 2.9139000000000003e-05, "loss": 0.1628, "step": 9716 }, { "epoch": 17.928901200369346, "grad_norm": 0.8818985819816589, "learning_rate": 2.9142000000000003e-05, "loss": 0.1243, "step": 9717 }, { "epoch": 17.930747922437675, "grad_norm": 0.9086804986000061, "learning_rate": 2.9145000000000003e-05, "loss": 0.1201, "step": 9718 }, { "epoch": 17.932594644506, "grad_norm": 0.5983270406723022, "learning_rate": 2.9148000000000003e-05, "loss": 0.0855, "step": 9719 }, { "epoch": 17.93444136657433, "grad_norm": 0.6448835730552673, "learning_rate": 2.9151000000000003e-05, "loss": 0.0799, "step": 9720 }, { "epoch": 17.93628808864266, "grad_norm": 0.9719138145446777, "learning_rate": 2.9154e-05, "loss": 0.1022, "step": 9721 }, { "epoch": 17.938134810710988, "grad_norm": 0.4985361397266388, "learning_rate": 2.9157e-05, "loss": 0.0639, "step": 9722 }, { "epoch": 17.939981532779317, "grad_norm": 0.5949772000312805, "learning_rate": 2.916e-05, "loss": 0.0661, "step": 9723 }, { "epoch": 17.941828254847646, "grad_norm": 0.4565816819667816, "learning_rate": 2.9163e-05, "loss": 0.0676, "step": 9724 }, { "epoch": 17.943674976915975, "grad_norm": 
0.7509490847587585, "learning_rate": 2.9165999999999998e-05, "loss": 0.0749, "step": 9725 }, { "epoch": 17.945521698984304, "grad_norm": 0.3464672565460205, "learning_rate": 2.9169e-05, "loss": 0.0221, "step": 9726 }, { "epoch": 17.94736842105263, "grad_norm": 0.39118891954421997, "learning_rate": 2.9172e-05, "loss": 0.0294, "step": 9727 }, { "epoch": 17.94921514312096, "grad_norm": 0.7514564394950867, "learning_rate": 2.9175e-05, "loss": 0.0638, "step": 9728 }, { "epoch": 17.95106186518929, "grad_norm": 0.35066482424736023, "learning_rate": 2.9178e-05, "loss": 0.025, "step": 9729 }, { "epoch": 17.952908587257618, "grad_norm": 0.4681917130947113, "learning_rate": 2.9181e-05, "loss": 0.0182, "step": 9730 }, { "epoch": 17.954755309325947, "grad_norm": 0.44393619894981384, "learning_rate": 2.9184e-05, "loss": 0.0193, "step": 9731 }, { "epoch": 17.956602031394276, "grad_norm": 0.31914278864860535, "learning_rate": 2.9187e-05, "loss": 0.0113, "step": 9732 }, { "epoch": 17.958448753462605, "grad_norm": 0.23968352377414703, "learning_rate": 2.919e-05, "loss": 0.0123, "step": 9733 }, { "epoch": 17.960295475530934, "grad_norm": 0.5183699131011963, "learning_rate": 2.9193e-05, "loss": 0.0124, "step": 9734 }, { "epoch": 17.96214219759926, "grad_norm": 0.19911415874958038, "learning_rate": 2.9196e-05, "loss": 0.008, "step": 9735 }, { "epoch": 17.96398891966759, "grad_norm": 0.5218525528907776, "learning_rate": 2.9199000000000003e-05, "loss": 0.0245, "step": 9736 }, { "epoch": 17.965835641735918, "grad_norm": 0.25067374110221863, "learning_rate": 2.9202000000000003e-05, "loss": 0.0136, "step": 9737 }, { "epoch": 17.967682363804247, "grad_norm": 0.3637305796146393, "learning_rate": 2.9205000000000002e-05, "loss": 0.0185, "step": 9738 }, { "epoch": 17.969529085872576, "grad_norm": 0.6495672464370728, "learning_rate": 2.9208000000000002e-05, "loss": 0.0206, "step": 9739 }, { "epoch": 17.971375807940905, "grad_norm": 0.589525043964386, "learning_rate": 2.9211000000000002e-05, 
"loss": 0.0179, "step": 9740 }, { "epoch": 17.973222530009235, "grad_norm": 0.46126195788383484, "learning_rate": 2.9214000000000002e-05, "loss": 0.0257, "step": 9741 }, { "epoch": 17.975069252077564, "grad_norm": 0.24324364960193634, "learning_rate": 2.9217e-05, "loss": 0.0114, "step": 9742 }, { "epoch": 17.97691597414589, "grad_norm": 0.44025933742523193, "learning_rate": 2.922e-05, "loss": 0.0161, "step": 9743 }, { "epoch": 17.97876269621422, "grad_norm": 0.3250148892402649, "learning_rate": 2.9223e-05, "loss": 0.0143, "step": 9744 }, { "epoch": 17.980609418282548, "grad_norm": 0.5272476673126221, "learning_rate": 2.9226e-05, "loss": 0.0188, "step": 9745 }, { "epoch": 17.982456140350877, "grad_norm": 2.31430983543396, "learning_rate": 2.9229e-05, "loss": 0.0309, "step": 9746 }, { "epoch": 17.984302862419206, "grad_norm": 0.20313288271427155, "learning_rate": 2.9232e-05, "loss": 0.0054, "step": 9747 }, { "epoch": 17.986149584487535, "grad_norm": 0.17994390428066254, "learning_rate": 2.9235e-05, "loss": 0.0069, "step": 9748 }, { "epoch": 17.987996306555864, "grad_norm": 0.675997257232666, "learning_rate": 2.9238e-05, "loss": 0.0066, "step": 9749 }, { "epoch": 17.989843028624193, "grad_norm": 0.5051863193511963, "learning_rate": 2.9241e-05, "loss": 0.0153, "step": 9750 }, { "epoch": 17.991689750692522, "grad_norm": 0.46311643719673157, "learning_rate": 2.9244e-05, "loss": 0.0179, "step": 9751 }, { "epoch": 17.993536472760848, "grad_norm": 0.38243719935417175, "learning_rate": 2.9247e-05, "loss": 0.0125, "step": 9752 }, { "epoch": 17.995383194829177, "grad_norm": 0.46555525064468384, "learning_rate": 2.925e-05, "loss": 0.034, "step": 9753 }, { "epoch": 17.997229916897506, "grad_norm": 0.41126081347465515, "learning_rate": 2.9253e-05, "loss": 0.0154, "step": 9754 }, { "epoch": 17.999076638965835, "grad_norm": 0.7898223996162415, "learning_rate": 2.9256e-05, "loss": 0.0344, "step": 9755 }, { "epoch": 18.0, "grad_norm": 0.8481200933456421, "learning_rate": 2.9259e-05, 
"loss": 0.0256, "step": 9756 }, { "epoch": 18.00184672206833, "grad_norm": 0.9342116713523865, "learning_rate": 2.9262000000000002e-05, "loss": 0.2025, "step": 9757 }, { "epoch": 18.00369344413666, "grad_norm": 0.6150652170181274, "learning_rate": 2.9265000000000002e-05, "loss": 0.1379, "step": 9758 }, { "epoch": 18.005540166204987, "grad_norm": 0.7522546052932739, "learning_rate": 2.9268e-05, "loss": 0.2386, "step": 9759 }, { "epoch": 18.007386888273317, "grad_norm": 1.3952076435089111, "learning_rate": 2.9271e-05, "loss": 0.1439, "step": 9760 }, { "epoch": 18.009233610341642, "grad_norm": 1.392382025718689, "learning_rate": 2.9274e-05, "loss": 0.1604, "step": 9761 }, { "epoch": 18.01108033240997, "grad_norm": 0.8652476072311401, "learning_rate": 2.9277e-05, "loss": 0.1213, "step": 9762 }, { "epoch": 18.0129270544783, "grad_norm": 0.510383665561676, "learning_rate": 2.928e-05, "loss": 0.0917, "step": 9763 }, { "epoch": 18.01477377654663, "grad_norm": 0.7107333540916443, "learning_rate": 2.9283e-05, "loss": 0.1355, "step": 9764 }, { "epoch": 18.01662049861496, "grad_norm": 0.5975661277770996, "learning_rate": 2.9286e-05, "loss": 0.0964, "step": 9765 }, { "epoch": 18.018467220683288, "grad_norm": 0.6261776089668274, "learning_rate": 2.9289e-05, "loss": 0.0617, "step": 9766 }, { "epoch": 18.020313942751617, "grad_norm": 0.3411554992198944, "learning_rate": 2.9292000000000003e-05, "loss": 0.0557, "step": 9767 }, { "epoch": 18.022160664819946, "grad_norm": 0.7305089235305786, "learning_rate": 2.9295000000000003e-05, "loss": 0.049, "step": 9768 }, { "epoch": 18.02400738688827, "grad_norm": 0.506694495677948, "learning_rate": 2.9298000000000003e-05, "loss": 0.0273, "step": 9769 }, { "epoch": 18.0258541089566, "grad_norm": 0.5924853682518005, "learning_rate": 2.9301e-05, "loss": 0.1094, "step": 9770 }, { "epoch": 18.02770083102493, "grad_norm": 0.35882166028022766, "learning_rate": 2.9304e-05, "loss": 0.0271, "step": 9771 }, { "epoch": 18.02954755309326, "grad_norm": 
0.44903764128685, "learning_rate": 2.9307e-05, "loss": 0.0165, "step": 9772 }, { "epoch": 18.03139427516159, "grad_norm": 0.36786243319511414, "learning_rate": 2.931e-05, "loss": 0.0156, "step": 9773 }, { "epoch": 18.033240997229917, "grad_norm": 0.1827472448348999, "learning_rate": 2.9313e-05, "loss": 0.0079, "step": 9774 }, { "epoch": 18.035087719298247, "grad_norm": 0.36526259779930115, "learning_rate": 2.9316e-05, "loss": 0.0158, "step": 9775 }, { "epoch": 18.036934441366576, "grad_norm": 0.23194356262683868, "learning_rate": 2.9318999999999998e-05, "loss": 0.0098, "step": 9776 }, { "epoch": 18.0387811634349, "grad_norm": 0.6627047657966614, "learning_rate": 2.9322e-05, "loss": 0.0213, "step": 9777 }, { "epoch": 18.04062788550323, "grad_norm": 0.22040517628192902, "learning_rate": 2.9325e-05, "loss": 0.0105, "step": 9778 }, { "epoch": 18.04247460757156, "grad_norm": 0.223483607172966, "learning_rate": 2.9328e-05, "loss": 0.0093, "step": 9779 }, { "epoch": 18.04432132963989, "grad_norm": 0.6126001477241516, "learning_rate": 2.9331e-05, "loss": 0.0142, "step": 9780 }, { "epoch": 18.046168051708218, "grad_norm": 0.2615165114402771, "learning_rate": 2.9334e-05, "loss": 0.0172, "step": 9781 }, { "epoch": 18.048014773776547, "grad_norm": 0.3288552761077881, "learning_rate": 2.9337e-05, "loss": 0.0118, "step": 9782 }, { "epoch": 18.049861495844876, "grad_norm": 0.1888730674982071, "learning_rate": 2.934e-05, "loss": 0.009, "step": 9783 }, { "epoch": 18.051708217913205, "grad_norm": 0.3395959138870239, "learning_rate": 2.9343e-05, "loss": 0.0089, "step": 9784 }, { "epoch": 18.053554939981534, "grad_norm": 0.1359689086675644, "learning_rate": 2.9346e-05, "loss": 0.0043, "step": 9785 }, { "epoch": 18.05540166204986, "grad_norm": 0.3583788573741913, "learning_rate": 2.9349e-05, "loss": 0.0145, "step": 9786 }, { "epoch": 18.05724838411819, "grad_norm": 0.34380096197128296, "learning_rate": 2.9352000000000003e-05, "loss": 0.0089, "step": 9787 }, { "epoch": 
18.05909510618652, "grad_norm": 0.2520500123500824, "learning_rate": 2.9355000000000003e-05, "loss": 0.0098, "step": 9788 }, { "epoch": 18.060941828254848, "grad_norm": 0.6030365824699402, "learning_rate": 2.9358000000000003e-05, "loss": 0.0146, "step": 9789 }, { "epoch": 18.062788550323177, "grad_norm": 0.29205048084259033, "learning_rate": 2.9361000000000002e-05, "loss": 0.004, "step": 9790 }, { "epoch": 18.064635272391506, "grad_norm": 0.3522581458091736, "learning_rate": 2.9364000000000002e-05, "loss": 0.0091, "step": 9791 }, { "epoch": 18.066481994459835, "grad_norm": 0.25486263632774353, "learning_rate": 2.9367000000000002e-05, "loss": 0.0066, "step": 9792 }, { "epoch": 18.068328716528164, "grad_norm": 0.2006990760564804, "learning_rate": 2.9370000000000002e-05, "loss": 0.012, "step": 9793 }, { "epoch": 18.07017543859649, "grad_norm": 0.19068430364131927, "learning_rate": 2.9373e-05, "loss": 0.0101, "step": 9794 }, { "epoch": 18.07202216066482, "grad_norm": 0.2882688045501709, "learning_rate": 2.9375999999999998e-05, "loss": 0.0086, "step": 9795 }, { "epoch": 18.073868882733148, "grad_norm": 0.27749103307724, "learning_rate": 2.9378999999999998e-05, "loss": 0.0172, "step": 9796 }, { "epoch": 18.075715604801477, "grad_norm": 0.29392507672309875, "learning_rate": 2.9382e-05, "loss": 0.0081, "step": 9797 }, { "epoch": 18.077562326869806, "grad_norm": 0.7844383120536804, "learning_rate": 2.9385e-05, "loss": 0.0278, "step": 9798 }, { "epoch": 18.079409048938135, "grad_norm": 0.18613365292549133, "learning_rate": 2.9388e-05, "loss": 0.0053, "step": 9799 }, { "epoch": 18.081255771006465, "grad_norm": 0.3608344793319702, "learning_rate": 2.9391e-05, "loss": 0.0109, "step": 9800 }, { "epoch": 18.083102493074794, "grad_norm": 0.32925644516944885, "learning_rate": 2.9394e-05, "loss": 0.008, "step": 9801 }, { "epoch": 18.08494921514312, "grad_norm": 0.21824046969413757, "learning_rate": 2.9397e-05, "loss": 0.0091, "step": 9802 }, { "epoch": 18.08679593721145, 
"grad_norm": 0.36572808027267456, "learning_rate": 2.94e-05, "loss": 0.0125, "step": 9803 }, { "epoch": 18.088642659279778, "grad_norm": 0.40578538179397583, "learning_rate": 2.9403e-05, "loss": 0.0148, "step": 9804 }, { "epoch": 18.090489381348107, "grad_norm": 0.4316546320915222, "learning_rate": 2.9406e-05, "loss": 0.0146, "step": 9805 }, { "epoch": 18.092336103416436, "grad_norm": 0.383820503950119, "learning_rate": 2.9409e-05, "loss": 0.007, "step": 9806 }, { "epoch": 18.094182825484765, "grad_norm": 0.9452648162841797, "learning_rate": 2.9412000000000002e-05, "loss": 0.2356, "step": 9807 }, { "epoch": 18.096029547553094, "grad_norm": 0.7241860628128052, "learning_rate": 2.9415000000000002e-05, "loss": 0.1892, "step": 9808 }, { "epoch": 18.097876269621423, "grad_norm": 1.6240252256393433, "learning_rate": 2.9418000000000002e-05, "loss": 0.0974, "step": 9809 }, { "epoch": 18.099722991689752, "grad_norm": 0.9428126811981201, "learning_rate": 2.9421000000000002e-05, "loss": 0.133, "step": 9810 }, { "epoch": 18.101569713758078, "grad_norm": 0.7279912233352661, "learning_rate": 2.9424e-05, "loss": 0.1401, "step": 9811 }, { "epoch": 18.103416435826407, "grad_norm": 0.5566812753677368, "learning_rate": 2.9427e-05, "loss": 0.0827, "step": 9812 }, { "epoch": 18.105263157894736, "grad_norm": 0.34377196431159973, "learning_rate": 2.943e-05, "loss": 0.0868, "step": 9813 }, { "epoch": 18.107109879963065, "grad_norm": 0.39494451880455017, "learning_rate": 2.9433e-05, "loss": 0.0655, "step": 9814 }, { "epoch": 18.108956602031395, "grad_norm": 0.5374976396560669, "learning_rate": 2.9436e-05, "loss": 0.0541, "step": 9815 }, { "epoch": 18.110803324099724, "grad_norm": 0.5303835272789001, "learning_rate": 2.9439e-05, "loss": 0.0617, "step": 9816 }, { "epoch": 18.112650046168053, "grad_norm": 0.4022885859012604, "learning_rate": 2.9442000000000004e-05, "loss": 0.0652, "step": 9817 }, { "epoch": 18.114496768236382, "grad_norm": 0.3138822019100189, "learning_rate": 
2.9445000000000004e-05, "loss": 0.0368, "step": 9818 }, { "epoch": 18.116343490304708, "grad_norm": 0.4218757450580597, "learning_rate": 2.9448e-05, "loss": 0.0383, "step": 9819 }, { "epoch": 18.118190212373037, "grad_norm": 1.328559398651123, "learning_rate": 2.9451e-05, "loss": 0.0613, "step": 9820 }, { "epoch": 18.120036934441366, "grad_norm": 0.26926133036613464, "learning_rate": 2.9454e-05, "loss": 0.0326, "step": 9821 }, { "epoch": 18.121883656509695, "grad_norm": 0.36706602573394775, "learning_rate": 2.9457e-05, "loss": 0.0215, "step": 9822 }, { "epoch": 18.123730378578024, "grad_norm": 0.5583202838897705, "learning_rate": 2.946e-05, "loss": 0.0217, "step": 9823 }, { "epoch": 18.125577100646353, "grad_norm": 0.38190707564353943, "learning_rate": 2.9463e-05, "loss": 0.0223, "step": 9824 }, { "epoch": 18.127423822714682, "grad_norm": 0.4724488854408264, "learning_rate": 2.9466e-05, "loss": 0.0485, "step": 9825 }, { "epoch": 18.12927054478301, "grad_norm": 0.3241349458694458, "learning_rate": 2.9469e-05, "loss": 0.0307, "step": 9826 }, { "epoch": 18.131117266851337, "grad_norm": 0.12482421100139618, "learning_rate": 2.9472000000000002e-05, "loss": 0.0056, "step": 9827 }, { "epoch": 18.132963988919666, "grad_norm": 0.2672232985496521, "learning_rate": 2.9475e-05, "loss": 0.0254, "step": 9828 }, { "epoch": 18.134810710987995, "grad_norm": 0.27123939990997314, "learning_rate": 2.9478e-05, "loss": 0.0135, "step": 9829 }, { "epoch": 18.136657433056325, "grad_norm": 0.25711241364479065, "learning_rate": 2.9481e-05, "loss": 0.0118, "step": 9830 }, { "epoch": 18.138504155124654, "grad_norm": 0.540631115436554, "learning_rate": 2.9484e-05, "loss": 0.0175, "step": 9831 }, { "epoch": 18.140350877192983, "grad_norm": 0.517338216304779, "learning_rate": 2.9487e-05, "loss": 0.0123, "step": 9832 }, { "epoch": 18.142197599261312, "grad_norm": 0.19550421833992004, "learning_rate": 2.949e-05, "loss": 0.0072, "step": 9833 }, { "epoch": 18.14404432132964, "grad_norm": 
0.24754598736763, "learning_rate": 2.9493e-05, "loss": 0.008, "step": 9834 }, { "epoch": 18.14589104339797, "grad_norm": 0.18133080005645752, "learning_rate": 2.9496e-05, "loss": 0.0108, "step": 9835 }, { "epoch": 18.147737765466296, "grad_norm": 0.27041253447532654, "learning_rate": 2.9499e-05, "loss": 0.0143, "step": 9836 }, { "epoch": 18.149584487534625, "grad_norm": 0.22081269323825836, "learning_rate": 2.9502000000000003e-05, "loss": 0.0075, "step": 9837 }, { "epoch": 18.151431209602954, "grad_norm": 0.37559419870376587, "learning_rate": 2.9505000000000003e-05, "loss": 0.0077, "step": 9838 }, { "epoch": 18.153277931671283, "grad_norm": 0.22016127407550812, "learning_rate": 2.9508000000000003e-05, "loss": 0.0108, "step": 9839 }, { "epoch": 18.155124653739612, "grad_norm": 0.26403334736824036, "learning_rate": 2.9511000000000003e-05, "loss": 0.0069, "step": 9840 }, { "epoch": 18.15697137580794, "grad_norm": 0.6012740731239319, "learning_rate": 2.9514000000000002e-05, "loss": 0.0201, "step": 9841 }, { "epoch": 18.15881809787627, "grad_norm": 0.3800670802593231, "learning_rate": 2.9517000000000002e-05, "loss": 0.0085, "step": 9842 }, { "epoch": 18.1606648199446, "grad_norm": 0.3758285343647003, "learning_rate": 2.9520000000000002e-05, "loss": 0.0101, "step": 9843 }, { "epoch": 18.162511542012926, "grad_norm": 0.2834491729736328, "learning_rate": 2.9523e-05, "loss": 0.0115, "step": 9844 }, { "epoch": 18.164358264081255, "grad_norm": 0.3003970682621002, "learning_rate": 2.9525999999999998e-05, "loss": 0.0082, "step": 9845 }, { "epoch": 18.166204986149584, "grad_norm": 0.4620857238769531, "learning_rate": 2.9528999999999998e-05, "loss": 0.0191, "step": 9846 }, { "epoch": 18.168051708217913, "grad_norm": 0.27000996470451355, "learning_rate": 2.9532e-05, "loss": 0.0079, "step": 9847 }, { "epoch": 18.169898430286242, "grad_norm": 0.33361855149269104, "learning_rate": 2.9535e-05, "loss": 0.007, "step": 9848 }, { "epoch": 18.17174515235457, "grad_norm": 
0.3398788571357727, "learning_rate": 2.9538e-05, "loss": 0.0095, "step": 9849 }, { "epoch": 18.1735918744229, "grad_norm": 0.17393405735492706, "learning_rate": 2.9541e-05, "loss": 0.0032, "step": 9850 }, { "epoch": 18.17543859649123, "grad_norm": 0.3088524341583252, "learning_rate": 2.9544e-05, "loss": 0.0094, "step": 9851 }, { "epoch": 18.177285318559555, "grad_norm": 1.077933430671692, "learning_rate": 2.9547e-05, "loss": 0.0119, "step": 9852 }, { "epoch": 18.179132040627884, "grad_norm": 0.48251262307167053, "learning_rate": 2.955e-05, "loss": 0.0057, "step": 9853 }, { "epoch": 18.180978762696213, "grad_norm": 0.5218617916107178, "learning_rate": 2.9553e-05, "loss": 0.0137, "step": 9854 }, { "epoch": 18.182825484764543, "grad_norm": 0.6712053418159485, "learning_rate": 2.9556e-05, "loss": 0.0169, "step": 9855 }, { "epoch": 18.18467220683287, "grad_norm": 1.1995861530303955, "learning_rate": 2.9559e-05, "loss": 0.0415, "step": 9856 }, { "epoch": 18.1865189289012, "grad_norm": 0.5802194476127625, "learning_rate": 2.9562000000000003e-05, "loss": 0.1692, "step": 9857 }, { "epoch": 18.18836565096953, "grad_norm": 0.6861435174942017, "learning_rate": 2.9565000000000002e-05, "loss": 0.142, "step": 9858 }, { "epoch": 18.19021237303786, "grad_norm": 0.7514278888702393, "learning_rate": 2.9568000000000002e-05, "loss": 0.1626, "step": 9859 }, { "epoch": 18.19205909510619, "grad_norm": 0.5463578104972839, "learning_rate": 2.9571000000000002e-05, "loss": 0.0996, "step": 9860 }, { "epoch": 18.193905817174514, "grad_norm": 0.6229665875434875, "learning_rate": 2.9574000000000002e-05, "loss": 0.075, "step": 9861 }, { "epoch": 18.195752539242843, "grad_norm": 0.8171252012252808, "learning_rate": 2.9577e-05, "loss": 0.1555, "step": 9862 }, { "epoch": 18.197599261311172, "grad_norm": 0.4324992001056671, "learning_rate": 2.958e-05, "loss": 0.0936, "step": 9863 }, { "epoch": 18.1994459833795, "grad_norm": 0.7613241672515869, "learning_rate": 2.9583e-05, "loss": 0.077, "step": 9864 
}, { "epoch": 18.20129270544783, "grad_norm": 0.3602001368999481, "learning_rate": 2.9586e-05, "loss": 0.0599, "step": 9865 }, { "epoch": 18.20313942751616, "grad_norm": 0.4885939359664917, "learning_rate": 2.9589e-05, "loss": 0.0588, "step": 9866 }, { "epoch": 18.20498614958449, "grad_norm": 0.5051184892654419, "learning_rate": 2.9592000000000004e-05, "loss": 0.061, "step": 9867 }, { "epoch": 18.206832871652818, "grad_norm": 0.34616658091545105, "learning_rate": 2.9595e-05, "loss": 0.0352, "step": 9868 }, { "epoch": 18.208679593721143, "grad_norm": 0.5483611226081848, "learning_rate": 2.9598e-05, "loss": 0.0965, "step": 9869 }, { "epoch": 18.210526315789473, "grad_norm": 0.45261475443840027, "learning_rate": 2.9601e-05, "loss": 0.0315, "step": 9870 }, { "epoch": 18.2123730378578, "grad_norm": 0.3057132363319397, "learning_rate": 2.9604e-05, "loss": 0.0235, "step": 9871 }, { "epoch": 18.21421975992613, "grad_norm": 0.20985980331897736, "learning_rate": 2.9607e-05, "loss": 0.0176, "step": 9872 }, { "epoch": 18.21606648199446, "grad_norm": 0.3141608238220215, "learning_rate": 2.961e-05, "loss": 0.0464, "step": 9873 }, { "epoch": 18.21791320406279, "grad_norm": 0.5170801281929016, "learning_rate": 2.9613e-05, "loss": 0.0201, "step": 9874 }, { "epoch": 18.21975992613112, "grad_norm": 0.8139154314994812, "learning_rate": 2.9616e-05, "loss": 0.0407, "step": 9875 }, { "epoch": 18.221606648199447, "grad_norm": 0.22715677320957184, "learning_rate": 2.9619e-05, "loss": 0.0155, "step": 9876 }, { "epoch": 18.223453370267773, "grad_norm": 0.33089739084243774, "learning_rate": 2.9622000000000002e-05, "loss": 0.0189, "step": 9877 }, { "epoch": 18.225300092336102, "grad_norm": 0.8802370429039001, "learning_rate": 2.9625000000000002e-05, "loss": 0.0154, "step": 9878 }, { "epoch": 18.22714681440443, "grad_norm": 0.2362622320652008, "learning_rate": 2.9628e-05, "loss": 0.0113, "step": 9879 }, { "epoch": 18.22899353647276, "grad_norm": 0.21023108065128326, "learning_rate": 2.9631e-05, 
"loss": 0.0074, "step": 9880 }, { "epoch": 18.23084025854109, "grad_norm": 0.39017900824546814, "learning_rate": 2.9634e-05, "loss": 0.011, "step": 9881 }, { "epoch": 18.23268698060942, "grad_norm": 0.31846851110458374, "learning_rate": 2.9637e-05, "loss": 0.0085, "step": 9882 }, { "epoch": 18.234533702677748, "grad_norm": 0.32058441638946533, "learning_rate": 2.964e-05, "loss": 0.0197, "step": 9883 }, { "epoch": 18.236380424746077, "grad_norm": 0.27096280455589294, "learning_rate": 2.9643e-05, "loss": 0.0448, "step": 9884 }, { "epoch": 18.238227146814406, "grad_norm": 0.28840863704681396, "learning_rate": 2.9646e-05, "loss": 0.012, "step": 9885 }, { "epoch": 18.24007386888273, "grad_norm": 0.2708560824394226, "learning_rate": 2.9649e-05, "loss": 0.0074, "step": 9886 }, { "epoch": 18.24192059095106, "grad_norm": 0.3992142677307129, "learning_rate": 2.9652e-05, "loss": 0.0113, "step": 9887 }, { "epoch": 18.24376731301939, "grad_norm": 0.2929539978504181, "learning_rate": 2.9655000000000003e-05, "loss": 0.0167, "step": 9888 }, { "epoch": 18.24561403508772, "grad_norm": 0.4625506103038788, "learning_rate": 2.9658000000000003e-05, "loss": 0.0103, "step": 9889 }, { "epoch": 18.24746075715605, "grad_norm": 0.2780413329601288, "learning_rate": 2.9661000000000003e-05, "loss": 0.0091, "step": 9890 }, { "epoch": 18.249307479224377, "grad_norm": 0.26614031195640564, "learning_rate": 2.9664000000000003e-05, "loss": 0.006, "step": 9891 }, { "epoch": 18.251154201292707, "grad_norm": 0.3119448125362396, "learning_rate": 2.9667000000000002e-05, "loss": 0.0069, "step": 9892 }, { "epoch": 18.253000923361036, "grad_norm": 0.4528767466545105, "learning_rate": 2.967e-05, "loss": 0.0091, "step": 9893 }, { "epoch": 18.25484764542936, "grad_norm": 0.24236981570720673, "learning_rate": 2.9673e-05, "loss": 0.0085, "step": 9894 }, { "epoch": 18.25669436749769, "grad_norm": 0.31961968541145325, "learning_rate": 2.9676e-05, "loss": 0.01, "step": 9895 }, { "epoch": 18.25854108956602, 
"grad_norm": 0.5327759385108948, "learning_rate": 2.9678999999999998e-05, "loss": 0.018, "step": 9896 }, { "epoch": 18.26038781163435, "grad_norm": 0.2756796181201935, "learning_rate": 2.9681999999999998e-05, "loss": 0.0066, "step": 9897 }, { "epoch": 18.262234533702678, "grad_norm": 0.2694704234600067, "learning_rate": 2.9685e-05, "loss": 0.0067, "step": 9898 }, { "epoch": 18.264081255771007, "grad_norm": 0.877261757850647, "learning_rate": 2.9688e-05, "loss": 0.0263, "step": 9899 }, { "epoch": 18.265927977839336, "grad_norm": 0.18148718774318695, "learning_rate": 2.9691e-05, "loss": 0.0065, "step": 9900 }, { "epoch": 18.267774699907665, "grad_norm": 0.17909610271453857, "learning_rate": 2.9694e-05, "loss": 0.0051, "step": 9901 }, { "epoch": 18.26962142197599, "grad_norm": 0.2902460992336273, "learning_rate": 2.9697e-05, "loss": 0.0084, "step": 9902 }, { "epoch": 18.27146814404432, "grad_norm": 0.45404717326164246, "learning_rate": 2.97e-05, "loss": 0.0147, "step": 9903 }, { "epoch": 18.27331486611265, "grad_norm": 0.3529004454612732, "learning_rate": 2.9703e-05, "loss": 0.0103, "step": 9904 }, { "epoch": 18.27516158818098, "grad_norm": 0.30562788248062134, "learning_rate": 2.9706e-05, "loss": 0.0074, "step": 9905 }, { "epoch": 18.277008310249307, "grad_norm": 0.3384891748428345, "learning_rate": 2.9709e-05, "loss": 0.0108, "step": 9906 }, { "epoch": 18.278855032317637, "grad_norm": 0.6173661351203918, "learning_rate": 2.9712e-05, "loss": 0.1869, "step": 9907 }, { "epoch": 18.280701754385966, "grad_norm": 0.5092833042144775, "learning_rate": 2.9715000000000003e-05, "loss": 0.1286, "step": 9908 }, { "epoch": 18.282548476454295, "grad_norm": 0.58086758852005, "learning_rate": 2.9718000000000002e-05, "loss": 0.1219, "step": 9909 }, { "epoch": 18.284395198522624, "grad_norm": 3.074801206588745, "learning_rate": 2.9721000000000002e-05, "loss": 0.1325, "step": 9910 }, { "epoch": 18.28624192059095, "grad_norm": 0.6098874807357788, "learning_rate": 2.9724000000000002e-05, 
"loss": 0.1242, "step": 9911 }, { "epoch": 18.28808864265928, "grad_norm": 0.6750804781913757, "learning_rate": 2.9727000000000002e-05, "loss": 0.049, "step": 9912 }, { "epoch": 18.289935364727608, "grad_norm": 0.7178696990013123, "learning_rate": 2.973e-05, "loss": 0.0865, "step": 9913 }, { "epoch": 18.291782086795937, "grad_norm": 0.43659573793411255, "learning_rate": 2.9733e-05, "loss": 0.0552, "step": 9914 }, { "epoch": 18.293628808864266, "grad_norm": 0.4207955002784729, "learning_rate": 2.9736e-05, "loss": 0.0653, "step": 9915 }, { "epoch": 18.295475530932595, "grad_norm": 0.3190794885158539, "learning_rate": 2.9739e-05, "loss": 0.0325, "step": 9916 }, { "epoch": 18.297322253000925, "grad_norm": 0.27810654044151306, "learning_rate": 2.9742e-05, "loss": 0.0386, "step": 9917 }, { "epoch": 18.299168975069254, "grad_norm": 0.6035851240158081, "learning_rate": 2.9745e-05, "loss": 0.0791, "step": 9918 }, { "epoch": 18.30101569713758, "grad_norm": 0.36111941933631897, "learning_rate": 2.9748e-05, "loss": 0.0429, "step": 9919 }, { "epoch": 18.30286241920591, "grad_norm": 0.2906637191772461, "learning_rate": 2.9751e-05, "loss": 0.0245, "step": 9920 }, { "epoch": 18.304709141274238, "grad_norm": 0.3560005724430084, "learning_rate": 2.9754e-05, "loss": 0.0273, "step": 9921 }, { "epoch": 18.306555863342567, "grad_norm": 0.3637906312942505, "learning_rate": 2.9757e-05, "loss": 0.0217, "step": 9922 }, { "epoch": 18.308402585410896, "grad_norm": 0.2707814574241638, "learning_rate": 2.976e-05, "loss": 0.0131, "step": 9923 }, { "epoch": 18.310249307479225, "grad_norm": 0.4414331018924713, "learning_rate": 2.9763e-05, "loss": 0.0129, "step": 9924 }, { "epoch": 18.312096029547554, "grad_norm": 0.3390336334705353, "learning_rate": 2.9766e-05, "loss": 0.014, "step": 9925 }, { "epoch": 18.313942751615883, "grad_norm": 0.35256242752075195, "learning_rate": 2.9769e-05, "loss": 0.0123, "step": 9926 }, { "epoch": 18.31578947368421, "grad_norm": 0.2255878895521164, "learning_rate": 
2.9772e-05, "loss": 0.0143, "step": 9927 }, { "epoch": 18.317636195752538, "grad_norm": 0.19617587327957153, "learning_rate": 2.9775000000000002e-05, "loss": 0.0119, "step": 9928 }, { "epoch": 18.319482917820867, "grad_norm": 0.5293916463851929, "learning_rate": 2.9778000000000002e-05, "loss": 0.0222, "step": 9929 }, { "epoch": 18.321329639889196, "grad_norm": 0.3983894884586334, "learning_rate": 2.9781e-05, "loss": 0.0119, "step": 9930 }, { "epoch": 18.323176361957525, "grad_norm": 0.1999145895242691, "learning_rate": 2.9784e-05, "loss": 0.008, "step": 9931 }, { "epoch": 18.325023084025855, "grad_norm": 0.21079927682876587, "learning_rate": 2.9787e-05, "loss": 0.0083, "step": 9932 }, { "epoch": 18.326869806094184, "grad_norm": 0.18804602324962616, "learning_rate": 2.979e-05, "loss": 0.0102, "step": 9933 }, { "epoch": 18.328716528162513, "grad_norm": 0.2636594772338867, "learning_rate": 2.9793e-05, "loss": 0.0094, "step": 9934 }, { "epoch": 18.330563250230842, "grad_norm": 0.22981365025043488, "learning_rate": 2.9796e-05, "loss": 0.0278, "step": 9935 }, { "epoch": 18.332409972299168, "grad_norm": 0.2265322506427765, "learning_rate": 2.9799e-05, "loss": 0.0121, "step": 9936 }, { "epoch": 18.334256694367497, "grad_norm": 0.2998930513858795, "learning_rate": 2.9802e-05, "loss": 0.0338, "step": 9937 }, { "epoch": 18.336103416435826, "grad_norm": 0.44089028239250183, "learning_rate": 2.9805000000000003e-05, "loss": 0.0103, "step": 9938 }, { "epoch": 18.337950138504155, "grad_norm": 0.36817842721939087, "learning_rate": 2.9808000000000003e-05, "loss": 0.0167, "step": 9939 }, { "epoch": 18.339796860572484, "grad_norm": 0.17441830039024353, "learning_rate": 2.9811000000000003e-05, "loss": 0.0089, "step": 9940 }, { "epoch": 18.341643582640813, "grad_norm": 0.5821911096572876, "learning_rate": 2.9814000000000003e-05, "loss": 0.0133, "step": 9941 }, { "epoch": 18.343490304709142, "grad_norm": 0.21145932376384735, "learning_rate": 2.9817e-05, "loss": 0.0084, "step": 9942 }, { 
"epoch": 18.34533702677747, "grad_norm": 0.38200899958610535, "learning_rate": 2.982e-05, "loss": 0.0093, "step": 9943 }, { "epoch": 18.347183748845797, "grad_norm": 0.6706154942512512, "learning_rate": 2.9823e-05, "loss": 0.0113, "step": 9944 }, { "epoch": 18.349030470914126, "grad_norm": 0.31891512870788574, "learning_rate": 2.9826e-05, "loss": 0.0078, "step": 9945 }, { "epoch": 18.350877192982455, "grad_norm": 0.47149425745010376, "learning_rate": 2.9829e-05, "loss": 0.0139, "step": 9946 }, { "epoch": 18.352723915050785, "grad_norm": 0.30881768465042114, "learning_rate": 2.9831999999999998e-05, "loss": 0.0081, "step": 9947 }, { "epoch": 18.354570637119114, "grad_norm": 0.6706964373588562, "learning_rate": 2.9835e-05, "loss": 0.0267, "step": 9948 }, { "epoch": 18.356417359187443, "grad_norm": 0.5232123136520386, "learning_rate": 2.9838e-05, "loss": 0.0109, "step": 9949 }, { "epoch": 18.358264081255772, "grad_norm": 0.3877739906311035, "learning_rate": 2.9841e-05, "loss": 0.0165, "step": 9950 }, { "epoch": 18.3601108033241, "grad_norm": 0.4155578017234802, "learning_rate": 2.9844e-05, "loss": 0.0135, "step": 9951 }, { "epoch": 18.361957525392427, "grad_norm": 0.5564242005348206, "learning_rate": 2.9847e-05, "loss": 0.0188, "step": 9952 }, { "epoch": 18.363804247460756, "grad_norm": 0.4501713812351227, "learning_rate": 2.985e-05, "loss": 0.0102, "step": 9953 }, { "epoch": 18.365650969529085, "grad_norm": 0.15342417359352112, "learning_rate": 2.9853e-05, "loss": 0.0051, "step": 9954 }, { "epoch": 18.367497691597414, "grad_norm": 0.46972164511680603, "learning_rate": 2.9856e-05, "loss": 0.0154, "step": 9955 }, { "epoch": 18.369344413665743, "grad_norm": 0.4050143361091614, "learning_rate": 2.9859e-05, "loss": 0.0167, "step": 9956 }, { "epoch": 18.371191135734072, "grad_norm": 0.7018055319786072, "learning_rate": 2.9862e-05, "loss": 0.1757, "step": 9957 }, { "epoch": 18.3730378578024, "grad_norm": 0.46269240975379944, "learning_rate": 2.9865000000000003e-05, "loss": 
0.1259, "step": 9958 }, { "epoch": 18.37488457987073, "grad_norm": 0.3974771201610565, "learning_rate": 2.9868000000000003e-05, "loss": 0.0835, "step": 9959 }, { "epoch": 18.37673130193906, "grad_norm": 0.558796226978302, "learning_rate": 2.9871000000000003e-05, "loss": 0.094, "step": 9960 }, { "epoch": 18.378578024007385, "grad_norm": 0.4177483320236206, "learning_rate": 2.9874000000000002e-05, "loss": 0.0545, "step": 9961 }, { "epoch": 18.380424746075715, "grad_norm": 0.4701012969017029, "learning_rate": 2.9877000000000002e-05, "loss": 0.0512, "step": 9962 }, { "epoch": 18.382271468144044, "grad_norm": 0.5373863577842712, "learning_rate": 2.9880000000000002e-05, "loss": 0.0975, "step": 9963 }, { "epoch": 18.384118190212373, "grad_norm": 0.5024900436401367, "learning_rate": 2.9883000000000002e-05, "loss": 0.0734, "step": 9964 }, { "epoch": 18.385964912280702, "grad_norm": 0.4085754454135895, "learning_rate": 2.9886e-05, "loss": 0.0475, "step": 9965 }, { "epoch": 18.38781163434903, "grad_norm": 0.4630957245826721, "learning_rate": 2.9889e-05, "loss": 0.0739, "step": 9966 }, { "epoch": 18.38965835641736, "grad_norm": 0.7340787649154663, "learning_rate": 2.9891999999999998e-05, "loss": 0.0373, "step": 9967 }, { "epoch": 18.39150507848569, "grad_norm": 0.3731462061405182, "learning_rate": 2.9895e-05, "loss": 0.0265, "step": 9968 }, { "epoch": 18.393351800554015, "grad_norm": 0.7383576035499573, "learning_rate": 2.9898e-05, "loss": 0.0926, "step": 9969 }, { "epoch": 18.395198522622344, "grad_norm": 0.5255272388458252, "learning_rate": 2.9901e-05, "loss": 0.0295, "step": 9970 }, { "epoch": 18.397045244690673, "grad_norm": 0.45948368310928345, "learning_rate": 2.9904e-05, "loss": 0.0407, "step": 9971 }, { "epoch": 18.398891966759003, "grad_norm": 0.8766978979110718, "learning_rate": 2.9907e-05, "loss": 0.0314, "step": 9972 }, { "epoch": 18.40073868882733, "grad_norm": 24.089262008666992, "learning_rate": 2.991e-05, "loss": 0.8891, "step": 9973 }, { "epoch": 
18.40258541089566, "grad_norm": 4.03217887878418, "learning_rate": 2.9913e-05, "loss": 0.1203, "step": 9974 }, { "epoch": 18.40443213296399, "grad_norm": 0.5048349499702454, "learning_rate": 2.9916e-05, "loss": 0.0239, "step": 9975 }, { "epoch": 18.40627885503232, "grad_norm": 0.3953116536140442, "learning_rate": 2.9919e-05, "loss": 0.0257, "step": 9976 }, { "epoch": 18.408125577100645, "grad_norm": 0.3035966753959656, "learning_rate": 2.9922e-05, "loss": 0.0245, "step": 9977 }, { "epoch": 18.409972299168974, "grad_norm": 0.22685669362545013, "learning_rate": 2.9925000000000002e-05, "loss": 0.0121, "step": 9978 }, { "epoch": 18.411819021237303, "grad_norm": 0.34173470735549927, "learning_rate": 2.9928000000000002e-05, "loss": 0.0345, "step": 9979 }, { "epoch": 18.413665743305632, "grad_norm": 0.32935476303100586, "learning_rate": 2.9931000000000002e-05, "loss": 0.0144, "step": 9980 }, { "epoch": 18.41551246537396, "grad_norm": 0.27030572295188904, "learning_rate": 2.9934000000000002e-05, "loss": 0.0093, "step": 9981 }, { "epoch": 18.41735918744229, "grad_norm": 0.32887694239616394, "learning_rate": 2.9937e-05, "loss": 0.0151, "step": 9982 }, { "epoch": 18.41920590951062, "grad_norm": 0.4195692241191864, "learning_rate": 2.994e-05, "loss": 0.0142, "step": 9983 }, { "epoch": 18.42105263157895, "grad_norm": 0.21854715049266815, "learning_rate": 2.9943e-05, "loss": 0.015, "step": 9984 }, { "epoch": 18.422899353647278, "grad_norm": 0.2712455093860626, "learning_rate": 2.9946e-05, "loss": 0.0078, "step": 9985 }, { "epoch": 18.424746075715603, "grad_norm": 0.2517973780632019, "learning_rate": 2.9949e-05, "loss": 0.0099, "step": 9986 }, { "epoch": 18.426592797783933, "grad_norm": 0.44632387161254883, "learning_rate": 2.9952e-05, "loss": 0.0156, "step": 9987 }, { "epoch": 18.42843951985226, "grad_norm": 0.7028042674064636, "learning_rate": 2.9955000000000004e-05, "loss": 0.0141, "step": 9988 }, { "epoch": 18.43028624192059, "grad_norm": 0.26331591606140137, "learning_rate": 
2.9958000000000004e-05, "loss": 0.0103, "step": 9989 }, { "epoch": 18.43213296398892, "grad_norm": 0.1908412128686905, "learning_rate": 2.9961000000000003e-05, "loss": 0.0057, "step": 9990 }, { "epoch": 18.43397968605725, "grad_norm": 0.28224021196365356, "learning_rate": 2.9964e-05, "loss": 0.0078, "step": 9991 }, { "epoch": 18.43582640812558, "grad_norm": 0.369598925113678, "learning_rate": 2.9967e-05, "loss": 0.0101, "step": 9992 }, { "epoch": 18.437673130193907, "grad_norm": 0.4752858281135559, "learning_rate": 2.997e-05, "loss": 0.0109, "step": 9993 }, { "epoch": 18.439519852262233, "grad_norm": 0.35530319809913635, "learning_rate": 2.9973e-05, "loss": 0.0107, "step": 9994 }, { "epoch": 18.441366574330562, "grad_norm": 3.2367780208587646, "learning_rate": 2.9976e-05, "loss": 0.0246, "step": 9995 }, { "epoch": 18.44321329639889, "grad_norm": 0.46068835258483887, "learning_rate": 2.9979e-05, "loss": 0.0106, "step": 9996 }, { "epoch": 18.44506001846722, "grad_norm": 0.462532639503479, "learning_rate": 2.9982e-05, "loss": 0.0207, "step": 9997 }, { "epoch": 18.44690674053555, "grad_norm": 0.29335060715675354, "learning_rate": 2.9985000000000002e-05, "loss": 0.012, "step": 9998 }, { "epoch": 18.44875346260388, "grad_norm": 0.34464287757873535, "learning_rate": 2.9988e-05, "loss": 0.0168, "step": 9999 }, { "epoch": 18.450600184672208, "grad_norm": 0.5311354398727417, "learning_rate": 2.9991e-05, "loss": 0.0198, "step": 10000 }, { "epoch": 18.450600184672208, "eval_cer": 0.11346138891049071, "eval_loss": 0.400764524936676, "eval_runtime": 16.6034, "eval_samples_per_second": 18.31, "eval_steps_per_second": 0.602, "eval_wer": 0.4002302379125096, "step": 10000 }, { "epoch": 18.452446906740537, "grad_norm": 1.1134467124938965, "learning_rate": 2.9994e-05, "loss": 0.0213, "step": 10001 }, { "epoch": 18.454293628808863, "grad_norm": 0.3000761568546295, "learning_rate": 2.9997e-05, "loss": 0.0153, "step": 10002 }, { "epoch": 18.45614035087719, "grad_norm": 
0.6289483308792114, "learning_rate": 3e-05, "loss": 0.0259, "step": 10003 }, { "epoch": 18.45798707294552, "grad_norm": 0.45202159881591797, "learning_rate": 2.999966666666667e-05, "loss": 0.0163, "step": 10004 }, { "epoch": 18.45983379501385, "grad_norm": 0.435899555683136, "learning_rate": 2.9999333333333333e-05, "loss": 0.0245, "step": 10005 }, { "epoch": 18.46168051708218, "grad_norm": 0.5257439017295837, "learning_rate": 2.9999000000000002e-05, "loss": 0.0327, "step": 10006 }, { "epoch": 18.46352723915051, "grad_norm": 1.0339168310165405, "learning_rate": 2.9998666666666668e-05, "loss": 0.2506, "step": 10007 }, { "epoch": 18.465373961218837, "grad_norm": 0.5932863354682922, "learning_rate": 2.9998333333333334e-05, "loss": 0.1746, "step": 10008 }, { "epoch": 18.467220683287167, "grad_norm": 0.6096854209899902, "learning_rate": 2.9998e-05, "loss": 0.1864, "step": 10009 }, { "epoch": 18.469067405355496, "grad_norm": 0.49512627720832825, "learning_rate": 2.999766666666667e-05, "loss": 0.0995, "step": 10010 }, { "epoch": 18.47091412742382, "grad_norm": 0.4499821960926056, "learning_rate": 2.999733333333333e-05, "loss": 0.0955, "step": 10011 }, { "epoch": 18.47276084949215, "grad_norm": 0.5281078219413757, "learning_rate": 2.9997e-05, "loss": 0.0588, "step": 10012 }, { "epoch": 18.47460757156048, "grad_norm": 0.5754829049110413, "learning_rate": 2.999666666666667e-05, "loss": 0.0842, "step": 10013 }, { "epoch": 18.47645429362881, "grad_norm": 0.4375404417514801, "learning_rate": 2.9996333333333333e-05, "loss": 0.0733, "step": 10014 }, { "epoch": 18.478301015697138, "grad_norm": 0.4932893216609955, "learning_rate": 2.9996000000000002e-05, "loss": 0.0756, "step": 10015 }, { "epoch": 18.480147737765467, "grad_norm": 0.6486257314682007, "learning_rate": 2.9995666666666668e-05, "loss": 0.1231, "step": 10016 }, { "epoch": 18.481994459833796, "grad_norm": 0.3846319317817688, "learning_rate": 2.9995333333333334e-05, "loss": 0.0681, "step": 10017 }, { "epoch": 
18.483841181902125, "grad_norm": 1.1660290956497192, "learning_rate": 2.9995e-05, "loss": 0.0722, "step": 10018 }, { "epoch": 18.48568790397045, "grad_norm": 0.4112194776535034, "learning_rate": 2.9994666666666666e-05, "loss": 0.0335, "step": 10019 }, { "epoch": 18.48753462603878, "grad_norm": 0.29463788866996765, "learning_rate": 2.9994333333333335e-05, "loss": 0.0234, "step": 10020 }, { "epoch": 18.48938134810711, "grad_norm": 0.37340521812438965, "learning_rate": 2.9994e-05, "loss": 0.039, "step": 10021 }, { "epoch": 18.49122807017544, "grad_norm": 0.7228174209594727, "learning_rate": 2.9993666666666667e-05, "loss": 0.0459, "step": 10022 }, { "epoch": 18.493074792243767, "grad_norm": 0.4925774335861206, "learning_rate": 2.9993333333333333e-05, "loss": 0.0171, "step": 10023 }, { "epoch": 18.494921514312097, "grad_norm": 0.5816299319267273, "learning_rate": 2.9993000000000002e-05, "loss": 0.0207, "step": 10024 }, { "epoch": 18.496768236380426, "grad_norm": 0.442291796207428, "learning_rate": 2.9992666666666665e-05, "loss": 0.0354, "step": 10025 }, { "epoch": 18.498614958448755, "grad_norm": 0.2628393769264221, "learning_rate": 2.9992333333333334e-05, "loss": 0.0115, "step": 10026 }, { "epoch": 18.50046168051708, "grad_norm": 0.33153483271598816, "learning_rate": 2.9992e-05, "loss": 0.0152, "step": 10027 }, { "epoch": 18.50230840258541, "grad_norm": 0.3420373797416687, "learning_rate": 2.9991666666666666e-05, "loss": 0.0238, "step": 10028 }, { "epoch": 18.50415512465374, "grad_norm": 0.6902183890342712, "learning_rate": 2.9991333333333335e-05, "loss": 0.0103, "step": 10029 }, { "epoch": 18.506001846722068, "grad_norm": 0.3361265957355499, "learning_rate": 2.9991e-05, "loss": 0.0204, "step": 10030 }, { "epoch": 18.507848568790397, "grad_norm": 0.32917317748069763, "learning_rate": 2.9990666666666667e-05, "loss": 0.0101, "step": 10031 }, { "epoch": 18.509695290858726, "grad_norm": 1.2217377424240112, "learning_rate": 2.9990333333333333e-05, "loss": 0.0163, "step": 
10032 }, { "epoch": 18.511542012927055, "grad_norm": 0.3950839638710022, "learning_rate": 2.9990000000000003e-05, "loss": 0.0213, "step": 10033 }, { "epoch": 18.513388734995385, "grad_norm": 0.24514494836330414, "learning_rate": 2.9989666666666665e-05, "loss": 0.0075, "step": 10034 }, { "epoch": 18.51523545706371, "grad_norm": 0.4704078733921051, "learning_rate": 2.9989333333333334e-05, "loss": 0.015, "step": 10035 }, { "epoch": 18.51708217913204, "grad_norm": 0.522798478603363, "learning_rate": 2.9989e-05, "loss": 0.0175, "step": 10036 }, { "epoch": 18.51892890120037, "grad_norm": 0.1550319790840149, "learning_rate": 2.9988666666666666e-05, "loss": 0.0086, "step": 10037 }, { "epoch": 18.520775623268698, "grad_norm": 0.20868565142154694, "learning_rate": 2.9988333333333336e-05, "loss": 0.0088, "step": 10038 }, { "epoch": 18.522622345337027, "grad_norm": 0.6053646802902222, "learning_rate": 2.9988e-05, "loss": 0.0214, "step": 10039 }, { "epoch": 18.524469067405356, "grad_norm": 0.7348169088363647, "learning_rate": 2.9987666666666667e-05, "loss": 0.0219, "step": 10040 }, { "epoch": 18.526315789473685, "grad_norm": 0.3019208014011383, "learning_rate": 2.9987333333333333e-05, "loss": 0.0156, "step": 10041 }, { "epoch": 18.528162511542014, "grad_norm": 0.9538294672966003, "learning_rate": 2.9987000000000003e-05, "loss": 0.0154, "step": 10042 }, { "epoch": 18.530009233610343, "grad_norm": 0.9015957117080688, "learning_rate": 2.9986666666666665e-05, "loss": 0.0223, "step": 10043 }, { "epoch": 18.53185595567867, "grad_norm": 0.31876128911972046, "learning_rate": 2.9986333333333335e-05, "loss": 0.0102, "step": 10044 }, { "epoch": 18.533702677746998, "grad_norm": 0.22382700443267822, "learning_rate": 2.9986000000000004e-05, "loss": 0.0146, "step": 10045 }, { "epoch": 18.535549399815327, "grad_norm": 0.3416086733341217, "learning_rate": 2.9985666666666666e-05, "loss": 0.0124, "step": 10046 }, { "epoch": 18.537396121883656, "grad_norm": 0.42624083161354065, "learning_rate": 
2.9985333333333336e-05, "loss": 0.0128, "step": 10047 }, { "epoch": 18.539242843951985, "grad_norm": 0.8317400217056274, "learning_rate": 2.9985000000000002e-05, "loss": 0.0254, "step": 10048 }, { "epoch": 18.541089566020315, "grad_norm": 0.7831063866615295, "learning_rate": 2.9984666666666668e-05, "loss": 0.0205, "step": 10049 }, { "epoch": 18.542936288088644, "grad_norm": 0.3047102391719818, "learning_rate": 2.9984333333333334e-05, "loss": 0.0147, "step": 10050 }, { "epoch": 18.544783010156973, "grad_norm": 0.3798275291919708, "learning_rate": 2.9984e-05, "loss": 0.0135, "step": 10051 }, { "epoch": 18.5466297322253, "grad_norm": 0.5144087076187134, "learning_rate": 2.9983666666666665e-05, "loss": 0.0144, "step": 10052 }, { "epoch": 18.548476454293628, "grad_norm": 0.3479164242744446, "learning_rate": 2.9983333333333335e-05, "loss": 0.0189, "step": 10053 }, { "epoch": 18.550323176361957, "grad_norm": 0.29653459787368774, "learning_rate": 2.9983e-05, "loss": 0.0286, "step": 10054 }, { "epoch": 18.552169898430286, "grad_norm": 0.5336138606071472, "learning_rate": 2.9982666666666667e-05, "loss": 0.0248, "step": 10055 }, { "epoch": 18.554016620498615, "grad_norm": 0.43095484375953674, "learning_rate": 2.9982333333333336e-05, "loss": 0.0118, "step": 10056 }, { "epoch": 18.555863342566944, "grad_norm": 0.8118236660957336, "learning_rate": 2.9982e-05, "loss": 0.1899, "step": 10057 }, { "epoch": 18.557710064635273, "grad_norm": 1.0012409687042236, "learning_rate": 2.9981666666666668e-05, "loss": 0.2917, "step": 10058 }, { "epoch": 18.559556786703602, "grad_norm": 0.6601340174674988, "learning_rate": 2.9981333333333334e-05, "loss": 0.1182, "step": 10059 }, { "epoch": 18.56140350877193, "grad_norm": 0.6776450872421265, "learning_rate": 2.9981e-05, "loss": 0.1185, "step": 10060 }, { "epoch": 18.563250230840257, "grad_norm": 0.588324248790741, "learning_rate": 2.9980666666666666e-05, "loss": 0.1353, "step": 10061 }, { "epoch": 18.565096952908586, "grad_norm": 
0.5905701518058777, "learning_rate": 2.9980333333333335e-05, "loss": 0.1602, "step": 10062 }, { "epoch": 18.566943674976915, "grad_norm": 0.5700792074203491, "learning_rate": 2.998e-05, "loss": 0.1245, "step": 10063 }, { "epoch": 18.568790397045245, "grad_norm": 0.5920236706733704, "learning_rate": 2.9979666666666667e-05, "loss": 0.0669, "step": 10064 }, { "epoch": 18.570637119113574, "grad_norm": 0.7023180723190308, "learning_rate": 2.9979333333333336e-05, "loss": 0.1148, "step": 10065 }, { "epoch": 18.572483841181903, "grad_norm": 0.3596070110797882, "learning_rate": 2.9979e-05, "loss": 0.0552, "step": 10066 }, { "epoch": 18.574330563250232, "grad_norm": 0.765246570110321, "learning_rate": 2.9978666666666668e-05, "loss": 0.0738, "step": 10067 }, { "epoch": 18.57617728531856, "grad_norm": 0.5413464307785034, "learning_rate": 2.9978333333333334e-05, "loss": 0.07, "step": 10068 }, { "epoch": 18.578024007386887, "grad_norm": 0.5046939253807068, "learning_rate": 2.9978e-05, "loss": 0.051, "step": 10069 }, { "epoch": 18.579870729455216, "grad_norm": 0.3605642020702362, "learning_rate": 2.997766666666667e-05, "loss": 0.037, "step": 10070 }, { "epoch": 18.581717451523545, "grad_norm": 0.3019397556781769, "learning_rate": 2.9977333333333335e-05, "loss": 0.0261, "step": 10071 }, { "epoch": 18.583564173591874, "grad_norm": 0.468206524848938, "learning_rate": 2.9977e-05, "loss": 0.0353, "step": 10072 }, { "epoch": 18.585410895660203, "grad_norm": 0.2793387770652771, "learning_rate": 2.9976666666666667e-05, "loss": 0.0193, "step": 10073 }, { "epoch": 18.587257617728532, "grad_norm": 0.33137214183807373, "learning_rate": 2.9976333333333336e-05, "loss": 0.0214, "step": 10074 }, { "epoch": 18.58910433979686, "grad_norm": 0.8799513578414917, "learning_rate": 2.9976e-05, "loss": 0.0167, "step": 10075 }, { "epoch": 18.59095106186519, "grad_norm": 0.1753733605146408, "learning_rate": 2.9975666666666668e-05, "loss": 0.0092, "step": 10076 }, { "epoch": 18.592797783933516, "grad_norm": 
0.3424614667892456, "learning_rate": 2.9975333333333334e-05, "loss": 0.0161, "step": 10077 }, { "epoch": 18.594644506001845, "grad_norm": 0.1594821959733963, "learning_rate": 2.9975e-05, "loss": 0.0104, "step": 10078 }, { "epoch": 18.596491228070175, "grad_norm": 0.401669979095459, "learning_rate": 2.997466666666667e-05, "loss": 0.0175, "step": 10079 }, { "epoch": 18.598337950138504, "grad_norm": 0.5343904495239258, "learning_rate": 2.9974333333333332e-05, "loss": 0.0169, "step": 10080 }, { "epoch": 18.600184672206833, "grad_norm": 0.639664351940155, "learning_rate": 2.9974e-05, "loss": 0.0211, "step": 10081 }, { "epoch": 18.602031394275162, "grad_norm": 0.34157347679138184, "learning_rate": 2.9973666666666667e-05, "loss": 0.0319, "step": 10082 }, { "epoch": 18.60387811634349, "grad_norm": 0.8372710943222046, "learning_rate": 2.9973333333333333e-05, "loss": 0.0505, "step": 10083 }, { "epoch": 18.60572483841182, "grad_norm": 0.23561322689056396, "learning_rate": 2.9973e-05, "loss": 0.0081, "step": 10084 }, { "epoch": 18.607571560480146, "grad_norm": 0.3529277741909027, "learning_rate": 2.997266666666667e-05, "loss": 0.0089, "step": 10085 }, { "epoch": 18.609418282548475, "grad_norm": 0.2528364658355713, "learning_rate": 2.997233333333333e-05, "loss": 0.0144, "step": 10086 }, { "epoch": 18.611265004616804, "grad_norm": 0.513348400592804, "learning_rate": 2.9972e-05, "loss": 0.0071, "step": 10087 }, { "epoch": 18.613111726685133, "grad_norm": 0.332783579826355, "learning_rate": 2.997166666666667e-05, "loss": 0.0113, "step": 10088 }, { "epoch": 18.614958448753463, "grad_norm": 0.2563062310218811, "learning_rate": 2.9971333333333332e-05, "loss": 0.0111, "step": 10089 }, { "epoch": 18.61680517082179, "grad_norm": 0.39945706725120544, "learning_rate": 2.9971e-05, "loss": 0.0078, "step": 10090 }, { "epoch": 18.61865189289012, "grad_norm": 0.30604419112205505, "learning_rate": 2.9970666666666667e-05, "loss": 0.0099, "step": 10091 }, { "epoch": 18.62049861495845, 
"grad_norm": 0.3137086033821106, "learning_rate": 2.9970333333333333e-05, "loss": 0.0108, "step": 10092 }, { "epoch": 18.62234533702678, "grad_norm": 0.3253783881664276, "learning_rate": 2.997e-05, "loss": 0.0171, "step": 10093 }, { "epoch": 18.624192059095105, "grad_norm": 1.2647370100021362, "learning_rate": 2.996966666666667e-05, "loss": 0.1085, "step": 10094 }, { "epoch": 18.626038781163434, "grad_norm": 0.2971855103969574, "learning_rate": 2.9969333333333335e-05, "loss": 0.0139, "step": 10095 }, { "epoch": 18.627885503231763, "grad_norm": 0.29171839356422424, "learning_rate": 2.9969e-05, "loss": 0.0123, "step": 10096 }, { "epoch": 18.629732225300092, "grad_norm": 0.3409866392612457, "learning_rate": 2.996866666666667e-05, "loss": 0.0153, "step": 10097 }, { "epoch": 18.63157894736842, "grad_norm": 0.3979312479496002, "learning_rate": 2.9968333333333332e-05, "loss": 0.0191, "step": 10098 }, { "epoch": 18.63342566943675, "grad_norm": 0.4440046548843384, "learning_rate": 2.9968000000000002e-05, "loss": 0.0141, "step": 10099 }, { "epoch": 18.63527239150508, "grad_norm": 0.3148280382156372, "learning_rate": 2.9967666666666668e-05, "loss": 0.0157, "step": 10100 }, { "epoch": 18.63711911357341, "grad_norm": 0.7379143834114075, "learning_rate": 2.9967333333333334e-05, "loss": 0.0142, "step": 10101 }, { "epoch": 18.638965835641734, "grad_norm": 0.4341832399368286, "learning_rate": 2.9967e-05, "loss": 0.0222, "step": 10102 }, { "epoch": 18.640812557710063, "grad_norm": 0.25606244802474976, "learning_rate": 2.996666666666667e-05, "loss": 0.0088, "step": 10103 }, { "epoch": 18.642659279778393, "grad_norm": 0.32717734575271606, "learning_rate": 2.9966333333333335e-05, "loss": 0.0195, "step": 10104 }, { "epoch": 18.64450600184672, "grad_norm": 0.5025584697723389, "learning_rate": 2.9966e-05, "loss": 0.0113, "step": 10105 }, { "epoch": 18.64635272391505, "grad_norm": 0.30470889806747437, "learning_rate": 2.996566666666667e-05, "loss": 0.0127, "step": 10106 }, { "epoch": 
18.64819944598338, "grad_norm": 3.4498097896575928, "learning_rate": 2.9965333333333333e-05, "loss": 0.2126, "step": 10107 }, { "epoch": 18.65004616805171, "grad_norm": 0.6399456262588501, "learning_rate": 2.9965000000000002e-05, "loss": 0.1395, "step": 10108 }, { "epoch": 18.65189289012004, "grad_norm": 0.5964101552963257, "learning_rate": 2.9964666666666664e-05, "loss": 0.1193, "step": 10109 }, { "epoch": 18.653739612188367, "grad_norm": 1.0935943126678467, "learning_rate": 2.9964333333333334e-05, "loss": 0.1248, "step": 10110 }, { "epoch": 18.655586334256693, "grad_norm": 1.1805049180984497, "learning_rate": 2.9964e-05, "loss": 0.1114, "step": 10111 }, { "epoch": 18.657433056325022, "grad_norm": 0.5595899224281311, "learning_rate": 2.9963666666666666e-05, "loss": 0.0861, "step": 10112 }, { "epoch": 18.65927977839335, "grad_norm": 0.4162147343158722, "learning_rate": 2.9963333333333335e-05, "loss": 0.056, "step": 10113 }, { "epoch": 18.66112650046168, "grad_norm": 0.8674030303955078, "learning_rate": 2.9963e-05, "loss": 0.1408, "step": 10114 }, { "epoch": 18.66297322253001, "grad_norm": 0.37030184268951416, "learning_rate": 2.9962666666666667e-05, "loss": 0.0534, "step": 10115 }, { "epoch": 18.66481994459834, "grad_norm": 0.4186866283416748, "learning_rate": 2.9962333333333333e-05, "loss": 0.0514, "step": 10116 }, { "epoch": 18.666666666666668, "grad_norm": 0.776822566986084, "learning_rate": 2.9962000000000002e-05, "loss": 0.0497, "step": 10117 }, { "epoch": 18.668513388734997, "grad_norm": 0.41464748978614807, "learning_rate": 2.9961666666666665e-05, "loss": 0.0481, "step": 10118 }, { "epoch": 18.670360110803323, "grad_norm": 1.1878679990768433, "learning_rate": 2.9961333333333334e-05, "loss": 0.0557, "step": 10119 }, { "epoch": 18.67220683287165, "grad_norm": 0.563939094543457, "learning_rate": 2.9961000000000003e-05, "loss": 0.0647, "step": 10120 }, { "epoch": 18.67405355493998, "grad_norm": 0.8240351676940918, "learning_rate": 2.9960666666666666e-05, "loss": 
0.0339, "step": 10121 }, { "epoch": 18.67590027700831, "grad_norm": 0.19536948204040527, "learning_rate": 2.9960333333333335e-05, "loss": 0.0106, "step": 10122 }, { "epoch": 18.67774699907664, "grad_norm": 1.515914797782898, "learning_rate": 2.996e-05, "loss": 0.0125, "step": 10123 }, { "epoch": 18.67959372114497, "grad_norm": 0.20640091598033905, "learning_rate": 2.9959666666666667e-05, "loss": 0.0189, "step": 10124 }, { "epoch": 18.681440443213297, "grad_norm": 0.44630637764930725, "learning_rate": 2.9959333333333333e-05, "loss": 0.0666, "step": 10125 }, { "epoch": 18.683287165281627, "grad_norm": 0.34298625588417053, "learning_rate": 2.9959000000000002e-05, "loss": 0.044, "step": 10126 }, { "epoch": 18.685133887349952, "grad_norm": 0.4227137267589569, "learning_rate": 2.9958666666666665e-05, "loss": 0.0153, "step": 10127 }, { "epoch": 18.68698060941828, "grad_norm": 0.21673138439655304, "learning_rate": 2.9958333333333334e-05, "loss": 0.01, "step": 10128 }, { "epoch": 18.68882733148661, "grad_norm": 0.1855924427509308, "learning_rate": 2.9958000000000004e-05, "loss": 0.0117, "step": 10129 }, { "epoch": 18.69067405355494, "grad_norm": 0.32578045129776, "learning_rate": 2.9957666666666666e-05, "loss": 0.0059, "step": 10130 }, { "epoch": 18.69252077562327, "grad_norm": 0.38381683826446533, "learning_rate": 2.9957333333333335e-05, "loss": 0.0166, "step": 10131 }, { "epoch": 18.694367497691598, "grad_norm": 0.4086329936981201, "learning_rate": 2.9957e-05, "loss": 0.0116, "step": 10132 }, { "epoch": 18.696214219759927, "grad_norm": 0.5231785178184509, "learning_rate": 2.9956666666666667e-05, "loss": 0.0156, "step": 10133 }, { "epoch": 18.698060941828256, "grad_norm": 0.22241975367069244, "learning_rate": 2.9956333333333333e-05, "loss": 0.0072, "step": 10134 }, { "epoch": 18.69990766389658, "grad_norm": 0.3974694013595581, "learning_rate": 2.9956000000000003e-05, "loss": 0.019, "step": 10135 }, { "epoch": 18.70175438596491, "grad_norm": 0.32537904381752014, 
"learning_rate": 2.9955666666666665e-05, "loss": 0.0131, "step": 10136 }, { "epoch": 18.70360110803324, "grad_norm": 0.6127800941467285, "learning_rate": 2.9955333333333334e-05, "loss": 0.0242, "step": 10137 }, { "epoch": 18.70544783010157, "grad_norm": 0.29393771290779114, "learning_rate": 2.9955000000000004e-05, "loss": 0.0121, "step": 10138 }, { "epoch": 18.7072945521699, "grad_norm": 0.4710220992565155, "learning_rate": 2.9954666666666666e-05, "loss": 0.0169, "step": 10139 }, { "epoch": 18.709141274238227, "grad_norm": 0.3239732086658478, "learning_rate": 2.9954333333333336e-05, "loss": 0.0096, "step": 10140 }, { "epoch": 18.710987996306557, "grad_norm": 0.3971695601940155, "learning_rate": 2.9953999999999998e-05, "loss": 0.0108, "step": 10141 }, { "epoch": 18.712834718374886, "grad_norm": 0.37625211477279663, "learning_rate": 2.9953666666666667e-05, "loss": 0.0158, "step": 10142 }, { "epoch": 18.714681440443215, "grad_norm": 0.6671343445777893, "learning_rate": 2.9953333333333333e-05, "loss": 0.0184, "step": 10143 }, { "epoch": 18.71652816251154, "grad_norm": 0.4562927186489105, "learning_rate": 2.9953e-05, "loss": 0.0165, "step": 10144 }, { "epoch": 18.71837488457987, "grad_norm": 0.270826518535614, "learning_rate": 2.995266666666667e-05, "loss": 0.0107, "step": 10145 }, { "epoch": 18.7202216066482, "grad_norm": 0.2679862678050995, "learning_rate": 2.9952333333333335e-05, "loss": 0.0163, "step": 10146 }, { "epoch": 18.722068328716528, "grad_norm": 0.42214062809944153, "learning_rate": 2.9952e-05, "loss": 0.013, "step": 10147 }, { "epoch": 18.723915050784857, "grad_norm": 0.3402858376502991, "learning_rate": 2.9951666666666666e-05, "loss": 0.0124, "step": 10148 }, { "epoch": 18.725761772853186, "grad_norm": 0.38823774456977844, "learning_rate": 2.9951333333333336e-05, "loss": 0.017, "step": 10149 }, { "epoch": 18.727608494921515, "grad_norm": 0.6362157464027405, "learning_rate": 2.9951e-05, "loss": 0.0208, "step": 10150 }, { "epoch": 18.729455216989845, 
"grad_norm": 0.21173816919326782, "learning_rate": 2.9950666666666668e-05, "loss": 0.0083, "step": 10151 }, { "epoch": 18.73130193905817, "grad_norm": 0.5997298955917358, "learning_rate": 2.9950333333333334e-05, "loss": 0.0177, "step": 10152 }, { "epoch": 18.7331486611265, "grad_norm": 0.670458972454071, "learning_rate": 2.995e-05, "loss": 0.0171, "step": 10153 }, { "epoch": 18.73499538319483, "grad_norm": 0.4021938741207123, "learning_rate": 2.994966666666667e-05, "loss": 0.0112, "step": 10154 }, { "epoch": 18.736842105263158, "grad_norm": 0.7811278104782104, "learning_rate": 2.9949333333333335e-05, "loss": 0.0159, "step": 10155 }, { "epoch": 18.738688827331487, "grad_norm": 0.6017845273017883, "learning_rate": 2.9949e-05, "loss": 0.016, "step": 10156 }, { "epoch": 18.740535549399816, "grad_norm": 1.6666468381881714, "learning_rate": 2.9948666666666667e-05, "loss": 0.3443, "step": 10157 }, { "epoch": 18.742382271468145, "grad_norm": 0.7472243309020996, "learning_rate": 2.9948333333333336e-05, "loss": 0.1577, "step": 10158 }, { "epoch": 18.744228993536474, "grad_norm": 0.7569506168365479, "learning_rate": 2.9948e-05, "loss": 0.1193, "step": 10159 }, { "epoch": 18.746075715604803, "grad_norm": 0.7633649110794067, "learning_rate": 2.9947666666666668e-05, "loss": 0.113, "step": 10160 }, { "epoch": 18.74792243767313, "grad_norm": 0.6256909370422363, "learning_rate": 2.9947333333333334e-05, "loss": 0.0973, "step": 10161 }, { "epoch": 18.749769159741458, "grad_norm": 1.0657498836517334, "learning_rate": 2.9947e-05, "loss": 0.118, "step": 10162 }, { "epoch": 18.751615881809787, "grad_norm": 0.7749153971672058, "learning_rate": 2.994666666666667e-05, "loss": 0.0741, "step": 10163 }, { "epoch": 18.753462603878116, "grad_norm": 0.4885929822921753, "learning_rate": 2.9946333333333335e-05, "loss": 0.0539, "step": 10164 }, { "epoch": 18.755309325946445, "grad_norm": 0.765285313129425, "learning_rate": 2.9946e-05, "loss": 0.0759, "step": 10165 }, { "epoch": 18.757156048014775, 
"grad_norm": 0.6439147591590881, "learning_rate": 2.9945666666666667e-05, "loss": 0.0714, "step": 10166 }, { "epoch": 18.759002770083104, "grad_norm": 0.5060761570930481, "learning_rate": 2.9945333333333336e-05, "loss": 0.0639, "step": 10167 }, { "epoch": 18.760849492151433, "grad_norm": 0.34558436274528503, "learning_rate": 2.9945e-05, "loss": 0.0341, "step": 10168 }, { "epoch": 18.76269621421976, "grad_norm": 0.6426031589508057, "learning_rate": 2.9944666666666668e-05, "loss": 0.0938, "step": 10169 }, { "epoch": 18.764542936288088, "grad_norm": 0.6749737858772278, "learning_rate": 2.9944333333333334e-05, "loss": 0.0304, "step": 10170 }, { "epoch": 18.766389658356417, "grad_norm": 0.3478706479072571, "learning_rate": 2.9944e-05, "loss": 0.0617, "step": 10171 }, { "epoch": 18.768236380424746, "grad_norm": 0.24540524184703827, "learning_rate": 2.994366666666667e-05, "loss": 0.0209, "step": 10172 }, { "epoch": 18.770083102493075, "grad_norm": 0.5011123418807983, "learning_rate": 2.9943333333333332e-05, "loss": 0.03, "step": 10173 }, { "epoch": 18.771929824561404, "grad_norm": 0.27377021312713623, "learning_rate": 2.9943e-05, "loss": 0.0177, "step": 10174 }, { "epoch": 18.773776546629733, "grad_norm": 0.1973041296005249, "learning_rate": 2.9942666666666667e-05, "loss": 0.0161, "step": 10175 }, { "epoch": 18.775623268698062, "grad_norm": 0.2237047255039215, "learning_rate": 2.9942333333333333e-05, "loss": 0.012, "step": 10176 }, { "epoch": 18.777469990766388, "grad_norm": 0.34094229340553284, "learning_rate": 2.9942e-05, "loss": 0.0444, "step": 10177 }, { "epoch": 18.779316712834717, "grad_norm": 0.4286072850227356, "learning_rate": 2.9941666666666668e-05, "loss": 0.0139, "step": 10178 }, { "epoch": 18.781163434903046, "grad_norm": 1.1234757900238037, "learning_rate": 2.9941333333333334e-05, "loss": 0.0222, "step": 10179 }, { "epoch": 18.783010156971375, "grad_norm": 0.4694407880306244, "learning_rate": 2.9941e-05, "loss": 0.0124, "step": 10180 }, { "epoch": 
18.784856879039705, "grad_norm": 0.2294221818447113, "learning_rate": 2.994066666666667e-05, "loss": 0.0114, "step": 10181 }, { "epoch": 18.786703601108034, "grad_norm": 0.2664346396923065, "learning_rate": 2.9940333333333332e-05, "loss": 0.0152, "step": 10182 }, { "epoch": 18.788550323176363, "grad_norm": 0.2968711853027344, "learning_rate": 2.994e-05, "loss": 0.0098, "step": 10183 }, { "epoch": 18.790397045244692, "grad_norm": 0.39534589648246765, "learning_rate": 2.9939666666666667e-05, "loss": 0.017, "step": 10184 }, { "epoch": 18.792243767313018, "grad_norm": 0.19877228140830994, "learning_rate": 2.9939333333333333e-05, "loss": 0.0086, "step": 10185 }, { "epoch": 18.794090489381347, "grad_norm": 0.178855299949646, "learning_rate": 2.9939e-05, "loss": 0.0083, "step": 10186 }, { "epoch": 18.795937211449676, "grad_norm": 0.38069117069244385, "learning_rate": 2.993866666666667e-05, "loss": 0.0153, "step": 10187 }, { "epoch": 18.797783933518005, "grad_norm": 0.7297185659408569, "learning_rate": 2.9938333333333334e-05, "loss": 0.0485, "step": 10188 }, { "epoch": 18.799630655586334, "grad_norm": 0.32633522152900696, "learning_rate": 2.9938e-05, "loss": 0.0135, "step": 10189 }, { "epoch": 18.801477377654663, "grad_norm": 0.3482316732406616, "learning_rate": 2.993766666666667e-05, "loss": 0.0131, "step": 10190 }, { "epoch": 18.803324099722992, "grad_norm": 0.5848433375358582, "learning_rate": 2.9937333333333332e-05, "loss": 0.0304, "step": 10191 }, { "epoch": 18.80517082179132, "grad_norm": 0.46898210048675537, "learning_rate": 2.9937e-05, "loss": 0.0138, "step": 10192 }, { "epoch": 18.80701754385965, "grad_norm": 0.5427939295768738, "learning_rate": 2.9936666666666667e-05, "loss": 0.012, "step": 10193 }, { "epoch": 18.808864265927976, "grad_norm": 0.2933759391307831, "learning_rate": 2.9936333333333333e-05, "loss": 0.0093, "step": 10194 }, { "epoch": 18.810710987996305, "grad_norm": 0.4972042739391327, "learning_rate": 2.9936000000000003e-05, "loss": 0.0144, "step": 
10195 }, { "epoch": 18.812557710064635, "grad_norm": 0.4755079448223114, "learning_rate": 2.993566666666667e-05, "loss": 0.0387, "step": 10196 }, { "epoch": 18.814404432132964, "grad_norm": 0.414993017911911, "learning_rate": 2.9935333333333335e-05, "loss": 0.0176, "step": 10197 }, { "epoch": 18.816251154201293, "grad_norm": 0.30449292063713074, "learning_rate": 2.9935e-05, "loss": 0.0084, "step": 10198 }, { "epoch": 18.818097876269622, "grad_norm": 0.595119059085846, "learning_rate": 2.993466666666667e-05, "loss": 0.0251, "step": 10199 }, { "epoch": 18.81994459833795, "grad_norm": 0.5565841794013977, "learning_rate": 2.9934333333333332e-05, "loss": 0.0178, "step": 10200 }, { "epoch": 18.82179132040628, "grad_norm": 0.17402859032154083, "learning_rate": 2.9934000000000002e-05, "loss": 0.0046, "step": 10201 }, { "epoch": 18.823638042474606, "grad_norm": 0.3507221043109894, "learning_rate": 2.9933666666666664e-05, "loss": 0.0089, "step": 10202 }, { "epoch": 18.825484764542935, "grad_norm": 0.5356447696685791, "learning_rate": 2.9933333333333334e-05, "loss": 0.037, "step": 10203 }, { "epoch": 18.827331486611264, "grad_norm": 0.41256019473075867, "learning_rate": 2.9933000000000003e-05, "loss": 0.0147, "step": 10204 }, { "epoch": 18.829178208679593, "grad_norm": 0.3081376254558563, "learning_rate": 2.9932666666666665e-05, "loss": 0.0118, "step": 10205 }, { "epoch": 18.831024930747922, "grad_norm": 0.45098060369491577, "learning_rate": 2.9932333333333335e-05, "loss": 0.0128, "step": 10206 }, { "epoch": 18.83287165281625, "grad_norm": 0.7322559356689453, "learning_rate": 2.9932e-05, "loss": 0.1536, "step": 10207 }, { "epoch": 18.83471837488458, "grad_norm": 0.6320260167121887, "learning_rate": 2.9931666666666667e-05, "loss": 0.1585, "step": 10208 }, { "epoch": 18.83656509695291, "grad_norm": 0.5659246444702148, "learning_rate": 2.9931333333333333e-05, "loss": 0.1664, "step": 10209 }, { "epoch": 18.83841181902124, "grad_norm": 0.38136017322540283, "learning_rate": 
2.9931000000000002e-05, "loss": 0.0818, "step": 10210 }, { "epoch": 18.840258541089565, "grad_norm": 0.4973321557044983, "learning_rate": 2.9930666666666668e-05, "loss": 0.0997, "step": 10211 }, { "epoch": 18.842105263157894, "grad_norm": 0.4908638596534729, "learning_rate": 2.9930333333333334e-05, "loss": 0.1203, "step": 10212 }, { "epoch": 18.843951985226223, "grad_norm": 0.48943161964416504, "learning_rate": 2.9930000000000003e-05, "loss": 0.0635, "step": 10213 }, { "epoch": 18.845798707294552, "grad_norm": 0.7102023363113403, "learning_rate": 2.9929666666666666e-05, "loss": 0.0842, "step": 10214 }, { "epoch": 18.84764542936288, "grad_norm": 0.4362596571445465, "learning_rate": 2.9929333333333335e-05, "loss": 0.0639, "step": 10215 }, { "epoch": 18.84949215143121, "grad_norm": 0.4935445785522461, "learning_rate": 2.9929e-05, "loss": 0.0571, "step": 10216 }, { "epoch": 18.85133887349954, "grad_norm": 0.5463587641716003, "learning_rate": 2.9928666666666667e-05, "loss": 0.094, "step": 10217 }, { "epoch": 18.85318559556787, "grad_norm": 0.39658188819885254, "learning_rate": 2.9928333333333333e-05, "loss": 0.0355, "step": 10218 }, { "epoch": 18.855032317636194, "grad_norm": 0.4764830768108368, "learning_rate": 2.9928000000000002e-05, "loss": 0.0387, "step": 10219 }, { "epoch": 18.856879039704523, "grad_norm": 0.28915467858314514, "learning_rate": 2.9927666666666668e-05, "loss": 0.0298, "step": 10220 }, { "epoch": 18.858725761772853, "grad_norm": 0.6550969481468201, "learning_rate": 2.9927333333333334e-05, "loss": 0.0526, "step": 10221 }, { "epoch": 18.86057248384118, "grad_norm": 0.22818061709403992, "learning_rate": 2.9927000000000003e-05, "loss": 0.0323, "step": 10222 }, { "epoch": 18.86241920590951, "grad_norm": 0.7346168756484985, "learning_rate": 2.9926666666666666e-05, "loss": 0.0663, "step": 10223 }, { "epoch": 18.86426592797784, "grad_norm": 0.27641716599464417, "learning_rate": 2.9926333333333335e-05, "loss": 0.0141, "step": 10224 }, { "epoch": 
18.86611265004617, "grad_norm": 0.32033631205558777, "learning_rate": 2.9926e-05, "loss": 0.0237, "step": 10225 }, { "epoch": 18.8679593721145, "grad_norm": 0.38354334235191345, "learning_rate": 2.9925666666666667e-05, "loss": 0.0234, "step": 10226 }, { "epoch": 18.869806094182824, "grad_norm": 0.13038446009159088, "learning_rate": 2.9925333333333333e-05, "loss": 0.0054, "step": 10227 }, { "epoch": 18.871652816251153, "grad_norm": 0.3191926181316376, "learning_rate": 2.9925000000000002e-05, "loss": 0.0218, "step": 10228 }, { "epoch": 18.873499538319482, "grad_norm": 0.26581940054893494, "learning_rate": 2.9924666666666668e-05, "loss": 0.0141, "step": 10229 }, { "epoch": 18.87534626038781, "grad_norm": 0.26856064796447754, "learning_rate": 2.9924333333333334e-05, "loss": 0.0152, "step": 10230 }, { "epoch": 18.87719298245614, "grad_norm": 0.5691299438476562, "learning_rate": 2.9924e-05, "loss": 0.0213, "step": 10231 }, { "epoch": 18.87903970452447, "grad_norm": 0.773888111114502, "learning_rate": 2.9923666666666666e-05, "loss": 0.0162, "step": 10232 }, { "epoch": 18.8808864265928, "grad_norm": 0.4445539116859436, "learning_rate": 2.9923333333333335e-05, "loss": 0.0156, "step": 10233 }, { "epoch": 18.882733148661128, "grad_norm": 0.30708056688308716, "learning_rate": 2.9922999999999998e-05, "loss": 0.0085, "step": 10234 }, { "epoch": 18.884579870729453, "grad_norm": 0.5381516814231873, "learning_rate": 2.9922666666666667e-05, "loss": 0.0181, "step": 10235 }, { "epoch": 18.886426592797783, "grad_norm": 0.2624804675579071, "learning_rate": 2.9922333333333333e-05, "loss": 0.0084, "step": 10236 }, { "epoch": 18.88827331486611, "grad_norm": 0.3261619508266449, "learning_rate": 2.9922e-05, "loss": 0.0096, "step": 10237 }, { "epoch": 18.89012003693444, "grad_norm": 0.3803158402442932, "learning_rate": 2.992166666666667e-05, "loss": 0.0132, "step": 10238 }, { "epoch": 18.89196675900277, "grad_norm": 0.29096710681915283, "learning_rate": 2.9921333333333334e-05, "loss": 0.0137, 
"step": 10239 }, { "epoch": 18.8938134810711, "grad_norm": 0.6219228506088257, "learning_rate": 2.9921e-05, "loss": 0.0155, "step": 10240 }, { "epoch": 18.89566020313943, "grad_norm": 0.6573033332824707, "learning_rate": 2.9920666666666666e-05, "loss": 0.0208, "step": 10241 }, { "epoch": 18.897506925207757, "grad_norm": 0.43783968687057495, "learning_rate": 2.9920333333333336e-05, "loss": 0.0149, "step": 10242 }, { "epoch": 18.899353647276087, "grad_norm": 0.44213783740997314, "learning_rate": 2.9919999999999998e-05, "loss": 0.0154, "step": 10243 }, { "epoch": 18.901200369344412, "grad_norm": 0.5574394464492798, "learning_rate": 2.9919666666666667e-05, "loss": 0.0177, "step": 10244 }, { "epoch": 18.90304709141274, "grad_norm": 0.2930018901824951, "learning_rate": 2.9919333333333337e-05, "loss": 0.0087, "step": 10245 }, { "epoch": 18.90489381348107, "grad_norm": 0.5472742915153503, "learning_rate": 2.9919e-05, "loss": 0.0123, "step": 10246 }, { "epoch": 18.9067405355494, "grad_norm": 0.33874809741973877, "learning_rate": 2.991866666666667e-05, "loss": 0.0141, "step": 10247 }, { "epoch": 18.90858725761773, "grad_norm": 0.6086728572845459, "learning_rate": 2.9918333333333335e-05, "loss": 0.0116, "step": 10248 }, { "epoch": 18.910433979686058, "grad_norm": 0.6606793999671936, "learning_rate": 2.9918e-05, "loss": 0.0206, "step": 10249 }, { "epoch": 18.912280701754387, "grad_norm": 0.8574153780937195, "learning_rate": 2.9917666666666666e-05, "loss": 0.0226, "step": 10250 }, { "epoch": 18.914127423822716, "grad_norm": 0.23669973015785217, "learning_rate": 2.9917333333333336e-05, "loss": 0.0108, "step": 10251 }, { "epoch": 18.91597414589104, "grad_norm": 0.6156405806541443, "learning_rate": 2.9917e-05, "loss": 0.0233, "step": 10252 }, { "epoch": 18.91782086795937, "grad_norm": 0.29926902055740356, "learning_rate": 2.9916666666666668e-05, "loss": 0.0107, "step": 10253 }, { "epoch": 18.9196675900277, "grad_norm": 0.2887798845767975, "learning_rate": 2.9916333333333337e-05, 
"loss": 0.0153, "step": 10254 }, { "epoch": 18.92151431209603, "grad_norm": 0.5958914756774902, "learning_rate": 2.9916e-05, "loss": 0.0135, "step": 10255 }, { "epoch": 18.92336103416436, "grad_norm": 0.6727198362350464, "learning_rate": 2.991566666666667e-05, "loss": 0.0115, "step": 10256 }, { "epoch": 18.925207756232687, "grad_norm": 0.5990599393844604, "learning_rate": 2.9915333333333335e-05, "loss": 0.1305, "step": 10257 }, { "epoch": 18.927054478301017, "grad_norm": 0.7151499390602112, "learning_rate": 2.9915e-05, "loss": 0.1737, "step": 10258 }, { "epoch": 18.928901200369346, "grad_norm": 0.5996337532997131, "learning_rate": 2.9914666666666667e-05, "loss": 0.1591, "step": 10259 }, { "epoch": 18.930747922437675, "grad_norm": 0.7856311202049255, "learning_rate": 2.9914333333333336e-05, "loss": 0.1495, "step": 10260 }, { "epoch": 18.932594644506, "grad_norm": 0.5247669219970703, "learning_rate": 2.9914000000000002e-05, "loss": 0.0875, "step": 10261 }, { "epoch": 18.93444136657433, "grad_norm": 0.5936292409896851, "learning_rate": 2.9913666666666668e-05, "loss": 0.0807, "step": 10262 }, { "epoch": 18.93628808864266, "grad_norm": 0.595765233039856, "learning_rate": 2.9913333333333334e-05, "loss": 0.0844, "step": 10263 }, { "epoch": 18.938134810710988, "grad_norm": 0.5866780877113342, "learning_rate": 2.9913e-05, "loss": 0.0498, "step": 10264 }, { "epoch": 18.939981532779317, "grad_norm": 0.40006527304649353, "learning_rate": 2.991266666666667e-05, "loss": 0.0411, "step": 10265 }, { "epoch": 18.941828254847646, "grad_norm": 0.3506452441215515, "learning_rate": 2.991233333333333e-05, "loss": 0.0972, "step": 10266 }, { "epoch": 18.943674976915975, "grad_norm": 0.2703535854816437, "learning_rate": 2.9912e-05, "loss": 0.0624, "step": 10267 }, { "epoch": 18.945521698984304, "grad_norm": 0.841524600982666, "learning_rate": 2.9911666666666667e-05, "loss": 0.0254, "step": 10268 }, { "epoch": 18.94736842105263, "grad_norm": 0.3244848847389221, "learning_rate": 
2.9911333333333333e-05, "loss": 0.0193, "step": 10269 }, { "epoch": 18.94921514312096, "grad_norm": 3.271413564682007, "learning_rate": 2.9911000000000002e-05, "loss": 0.0191, "step": 10270 }, { "epoch": 18.95106186518929, "grad_norm": 0.24318455159664154, "learning_rate": 2.9910666666666668e-05, "loss": 0.0213, "step": 10271 }, { "epoch": 18.952908587257618, "grad_norm": 0.617788553237915, "learning_rate": 2.9910333333333334e-05, "loss": 0.0208, "step": 10272 }, { "epoch": 18.954755309325947, "grad_norm": 0.33688652515411377, "learning_rate": 2.991e-05, "loss": 0.0092, "step": 10273 }, { "epoch": 18.956602031394276, "grad_norm": 1.3016357421875, "learning_rate": 2.990966666666667e-05, "loss": 0.0223, "step": 10274 }, { "epoch": 18.958448753462605, "grad_norm": 0.3748088479042053, "learning_rate": 2.9909333333333332e-05, "loss": 0.0134, "step": 10275 }, { "epoch": 18.960295475530934, "grad_norm": 0.5534132719039917, "learning_rate": 2.9909e-05, "loss": 0.0253, "step": 10276 }, { "epoch": 18.96214219759926, "grad_norm": 0.4077107906341553, "learning_rate": 2.9908666666666667e-05, "loss": 0.0282, "step": 10277 }, { "epoch": 18.96398891966759, "grad_norm": 0.355773389339447, "learning_rate": 2.9908333333333333e-05, "loss": 0.019, "step": 10278 }, { "epoch": 18.965835641735918, "grad_norm": 0.42264899611473083, "learning_rate": 2.9908000000000002e-05, "loss": 0.009, "step": 10279 }, { "epoch": 18.967682363804247, "grad_norm": 0.40342891216278076, "learning_rate": 2.9907666666666668e-05, "loss": 0.0136, "step": 10280 }, { "epoch": 18.969529085872576, "grad_norm": 0.5260114669799805, "learning_rate": 2.9907333333333334e-05, "loss": 0.0631, "step": 10281 }, { "epoch": 18.971375807940905, "grad_norm": 0.46058377623558044, "learning_rate": 2.9907e-05, "loss": 0.0161, "step": 10282 }, { "epoch": 18.973222530009235, "grad_norm": 0.6268635988235474, "learning_rate": 2.990666666666667e-05, "loss": 0.0119, "step": 10283 }, { "epoch": 18.975069252077564, "grad_norm": 
0.4749756157398224, "learning_rate": 2.9906333333333332e-05, "loss": 0.0122, "step": 10284 }, { "epoch": 18.97691597414589, "grad_norm": 0.2996837794780731, "learning_rate": 2.9906e-05, "loss": 0.0116, "step": 10285 }, { "epoch": 18.97876269621422, "grad_norm": 0.4519551694393158, "learning_rate": 2.9905666666666667e-05, "loss": 0.0142, "step": 10286 }, { "epoch": 18.980609418282548, "grad_norm": 0.30716121196746826, "learning_rate": 2.9905333333333333e-05, "loss": 0.0125, "step": 10287 }, { "epoch": 18.982456140350877, "grad_norm": 0.34883931279182434, "learning_rate": 2.9905000000000003e-05, "loss": 0.0156, "step": 10288 }, { "epoch": 18.984302862419206, "grad_norm": 0.4810369312763214, "learning_rate": 2.990466666666667e-05, "loss": 0.0261, "step": 10289 }, { "epoch": 18.986149584487535, "grad_norm": 0.46950608491897583, "learning_rate": 2.9904333333333334e-05, "loss": 0.015, "step": 10290 }, { "epoch": 18.987996306555864, "grad_norm": 0.30146342515945435, "learning_rate": 2.9904e-05, "loss": 0.017, "step": 10291 }, { "epoch": 18.989843028624193, "grad_norm": 0.5918002724647522, "learning_rate": 2.9903666666666666e-05, "loss": 0.021, "step": 10292 }, { "epoch": 18.991689750692522, "grad_norm": 0.25285613536834717, "learning_rate": 2.9903333333333332e-05, "loss": 0.0108, "step": 10293 }, { "epoch": 18.993536472760848, "grad_norm": 0.5188554525375366, "learning_rate": 2.9903e-05, "loss": 0.0269, "step": 10294 }, { "epoch": 18.995383194829177, "grad_norm": 0.22588178515434265, "learning_rate": 2.9902666666666667e-05, "loss": 0.0087, "step": 10295 }, { "epoch": 18.997229916897506, "grad_norm": 0.33000004291534424, "learning_rate": 2.9902333333333333e-05, "loss": 0.0087, "step": 10296 }, { "epoch": 18.999076638965835, "grad_norm": 0.39947277307510376, "learning_rate": 2.9902000000000003e-05, "loss": 0.012, "step": 10297 }, { "epoch": 19.0, "grad_norm": 0.390468955039978, "learning_rate": 2.9901666666666665e-05, "loss": 0.0301, "step": 10298 }, { "epoch": 
19.00184672206833, "grad_norm": 0.7757726311683655, "learning_rate": 2.9901333333333335e-05, "loss": 0.1456, "step": 10299 }, { "epoch": 19.00369344413666, "grad_norm": 0.7913090586662292, "learning_rate": 2.9901e-05, "loss": 0.1691, "step": 10300 }, { "epoch": 19.005540166204987, "grad_norm": 0.677194356918335, "learning_rate": 2.9900666666666666e-05, "loss": 0.0949, "step": 10301 }, { "epoch": 19.007386888273317, "grad_norm": 0.788819432258606, "learning_rate": 2.9900333333333332e-05, "loss": 0.0753, "step": 10302 }, { "epoch": 19.009233610341642, "grad_norm": 0.5638797283172607, "learning_rate": 2.9900000000000002e-05, "loss": 0.0793, "step": 10303 }, { "epoch": 19.01108033240997, "grad_norm": 0.4615686237812042, "learning_rate": 2.9899666666666668e-05, "loss": 0.1133, "step": 10304 }, { "epoch": 19.0129270544783, "grad_norm": 0.5038096904754639, "learning_rate": 2.9899333333333334e-05, "loss": 0.0578, "step": 10305 }, { "epoch": 19.01477377654663, "grad_norm": 0.767895519733429, "learning_rate": 2.9899000000000003e-05, "loss": 0.0495, "step": 10306 }, { "epoch": 19.01662049861496, "grad_norm": 0.35079145431518555, "learning_rate": 2.9898666666666665e-05, "loss": 0.0444, "step": 10307 }, { "epoch": 19.018467220683288, "grad_norm": 0.43407732248306274, "learning_rate": 2.9898333333333335e-05, "loss": 0.0537, "step": 10308 }, { "epoch": 19.020313942751617, "grad_norm": 0.26736965775489807, "learning_rate": 2.9898e-05, "loss": 0.0311, "step": 10309 }, { "epoch": 19.022160664819946, "grad_norm": 0.45536327362060547, "learning_rate": 2.9897666666666667e-05, "loss": 0.0389, "step": 10310 }, { "epoch": 19.02400738688827, "grad_norm": 0.31458866596221924, "learning_rate": 2.9897333333333336e-05, "loss": 0.0386, "step": 10311 }, { "epoch": 19.0258541089566, "grad_norm": 0.2078089565038681, "learning_rate": 2.9897000000000002e-05, "loss": 0.0229, "step": 10312 }, { "epoch": 19.02770083102493, "grad_norm": 0.42538657784461975, "learning_rate": 2.9896666666666668e-05, 
"loss": 0.0267, "step": 10313 }, { "epoch": 19.02954755309326, "grad_norm": 0.7776997685432434, "learning_rate": 2.9896333333333334e-05, "loss": 0.0207, "step": 10314 }, { "epoch": 19.03139427516159, "grad_norm": 0.46976688504219055, "learning_rate": 2.9896000000000003e-05, "loss": 0.0272, "step": 10315 }, { "epoch": 19.033240997229917, "grad_norm": 0.1779782623052597, "learning_rate": 2.9895666666666666e-05, "loss": 0.0086, "step": 10316 }, { "epoch": 19.035087719298247, "grad_norm": 0.4215569794178009, "learning_rate": 2.9895333333333335e-05, "loss": 0.0153, "step": 10317 }, { "epoch": 19.036934441366576, "grad_norm": 0.37013107538223267, "learning_rate": 2.9895e-05, "loss": 0.0166, "step": 10318 }, { "epoch": 19.0387811634349, "grad_norm": 0.1785709410905838, "learning_rate": 2.9894666666666667e-05, "loss": 0.0082, "step": 10319 }, { "epoch": 19.04062788550323, "grad_norm": 0.7148463129997253, "learning_rate": 2.9894333333333336e-05, "loss": 0.0334, "step": 10320 }, { "epoch": 19.04247460757156, "grad_norm": 0.35422831773757935, "learning_rate": 2.9894e-05, "loss": 0.0128, "step": 10321 }, { "epoch": 19.04432132963989, "grad_norm": 0.19529224932193756, "learning_rate": 2.9893666666666668e-05, "loss": 0.0104, "step": 10322 }, { "epoch": 19.046168051708218, "grad_norm": 0.21543395519256592, "learning_rate": 2.9893333333333334e-05, "loss": 0.012, "step": 10323 }, { "epoch": 19.048014773776547, "grad_norm": 0.25531303882598877, "learning_rate": 2.9893e-05, "loss": 0.0082, "step": 10324 }, { "epoch": 19.049861495844876, "grad_norm": 0.20938219130039215, "learning_rate": 2.9892666666666666e-05, "loss": 0.0104, "step": 10325 }, { "epoch": 19.051708217913205, "grad_norm": 0.3913450837135315, "learning_rate": 2.9892333333333335e-05, "loss": 0.0077, "step": 10326 }, { "epoch": 19.053554939981534, "grad_norm": 0.16339915990829468, "learning_rate": 2.9891999999999998e-05, "loss": 0.0056, "step": 10327 }, { "epoch": 19.05540166204986, "grad_norm": 0.2552281618118286, 
"learning_rate": 2.9891666666666667e-05, "loss": 0.006, "step": 10328 }, { "epoch": 19.05724838411819, "grad_norm": 0.35827621817588806, "learning_rate": 2.9891333333333336e-05, "loss": 0.0098, "step": 10329 }, { "epoch": 19.05909510618652, "grad_norm": 0.2173592448234558, "learning_rate": 2.9891e-05, "loss": 0.0077, "step": 10330 }, { "epoch": 19.060941828254848, "grad_norm": 0.2610386312007904, "learning_rate": 2.9890666666666668e-05, "loss": 0.0131, "step": 10331 }, { "epoch": 19.062788550323177, "grad_norm": 0.18036872148513794, "learning_rate": 2.9890333333333334e-05, "loss": 0.0064, "step": 10332 }, { "epoch": 19.064635272391506, "grad_norm": 0.5067037343978882, "learning_rate": 2.989e-05, "loss": 0.0105, "step": 10333 }, { "epoch": 19.066481994459835, "grad_norm": 0.7099167704582214, "learning_rate": 2.9889666666666666e-05, "loss": 0.0082, "step": 10334 }, { "epoch": 19.068328716528164, "grad_norm": 0.1525428295135498, "learning_rate": 2.9889333333333335e-05, "loss": 0.0053, "step": 10335 }, { "epoch": 19.07017543859649, "grad_norm": 0.31399956345558167, "learning_rate": 2.9889e-05, "loss": 0.0113, "step": 10336 }, { "epoch": 19.07202216066482, "grad_norm": 0.45584020018577576, "learning_rate": 2.9888666666666667e-05, "loss": 0.0089, "step": 10337 }, { "epoch": 19.073868882733148, "grad_norm": 0.6998863220214844, "learning_rate": 2.9888333333333337e-05, "loss": 0.0135, "step": 10338 }, { "epoch": 19.075715604801477, "grad_norm": 0.3413309156894684, "learning_rate": 2.9888e-05, "loss": 0.0111, "step": 10339 }, { "epoch": 19.077562326869806, "grad_norm": 0.25519177317619324, "learning_rate": 2.988766666666667e-05, "loss": 0.0064, "step": 10340 }, { "epoch": 19.079409048938135, "grad_norm": 0.5262901186943054, "learning_rate": 2.9887333333333334e-05, "loss": 0.0077, "step": 10341 }, { "epoch": 19.081255771006465, "grad_norm": 0.9745499491691589, "learning_rate": 2.9887e-05, "loss": 0.0123, "step": 10342 }, { "epoch": 19.083102493074794, "grad_norm": 
0.34378862380981445, "learning_rate": 2.9886666666666666e-05, "loss": 0.0072, "step": 10343 }, { "epoch": 19.08494921514312, "grad_norm": 0.300809770822525, "learning_rate": 2.9886333333333336e-05, "loss": 0.0063, "step": 10344 }, { "epoch": 19.08679593721145, "grad_norm": 0.34393367171287537, "learning_rate": 2.9886e-05, "loss": 0.0055, "step": 10345 }, { "epoch": 19.088642659279778, "grad_norm": 0.2126002013683319, "learning_rate": 2.9885666666666667e-05, "loss": 0.0057, "step": 10346 }, { "epoch": 19.090489381348107, "grad_norm": 0.394257515668869, "learning_rate": 2.9885333333333337e-05, "loss": 0.0123, "step": 10347 }, { "epoch": 19.092336103416436, "grad_norm": 0.4051159620285034, "learning_rate": 2.9885e-05, "loss": 0.031, "step": 10348 }, { "epoch": 19.094182825484765, "grad_norm": 0.7709018588066101, "learning_rate": 2.988466666666667e-05, "loss": 0.2503, "step": 10349 }, { "epoch": 19.096029547553094, "grad_norm": 0.6019572019577026, "learning_rate": 2.9884333333333335e-05, "loss": 0.1228, "step": 10350 }, { "epoch": 19.097876269621423, "grad_norm": 0.5292881727218628, "learning_rate": 2.9884e-05, "loss": 0.1702, "step": 10351 }, { "epoch": 19.099722991689752, "grad_norm": 0.5636563301086426, "learning_rate": 2.9883666666666666e-05, "loss": 0.1217, "step": 10352 }, { "epoch": 19.101569713758078, "grad_norm": 0.45421287417411804, "learning_rate": 2.9883333333333332e-05, "loss": 0.0916, "step": 10353 }, { "epoch": 19.103416435826407, "grad_norm": 0.6303958892822266, "learning_rate": 2.9883000000000002e-05, "loss": 0.0635, "step": 10354 }, { "epoch": 19.105263157894736, "grad_norm": 0.7112407088279724, "learning_rate": 2.9882666666666668e-05, "loss": 0.0863, "step": 10355 }, { "epoch": 19.107109879963065, "grad_norm": 0.6119093894958496, "learning_rate": 2.9882333333333334e-05, "loss": 0.0879, "step": 10356 }, { "epoch": 19.108956602031395, "grad_norm": 0.42396941781044006, "learning_rate": 2.9882e-05, "loss": 0.0857, "step": 10357 }, { "epoch": 
19.110803324099724, "grad_norm": 0.6566585302352905, "learning_rate": 2.988166666666667e-05, "loss": 0.0547, "step": 10358 }, { "epoch": 19.112650046168053, "grad_norm": 0.37996527552604675, "learning_rate": 2.988133333333333e-05, "loss": 0.0465, "step": 10359 }, { "epoch": 19.114496768236382, "grad_norm": 0.8353615999221802, "learning_rate": 2.9881e-05, "loss": 0.0446, "step": 10360 }, { "epoch": 19.116343490304708, "grad_norm": 0.3163006901741028, "learning_rate": 2.988066666666667e-05, "loss": 0.0311, "step": 10361 }, { "epoch": 19.118190212373037, "grad_norm": 0.4427793323993683, "learning_rate": 2.9880333333333333e-05, "loss": 0.0476, "step": 10362 }, { "epoch": 19.120036934441366, "grad_norm": 0.24254950881004333, "learning_rate": 2.9880000000000002e-05, "loss": 0.0231, "step": 10363 }, { "epoch": 19.121883656509695, "grad_norm": 0.38209423422813416, "learning_rate": 2.9879666666666668e-05, "loss": 0.0591, "step": 10364 }, { "epoch": 19.123730378578024, "grad_norm": 0.18305137753486633, "learning_rate": 2.9879333333333334e-05, "loss": 0.0107, "step": 10365 }, { "epoch": 19.125577100646353, "grad_norm": 0.42425912618637085, "learning_rate": 2.9879e-05, "loss": 0.0102, "step": 10366 }, { "epoch": 19.127423822714682, "grad_norm": 0.24442718923091888, "learning_rate": 2.987866666666667e-05, "loss": 0.0418, "step": 10367 }, { "epoch": 19.12927054478301, "grad_norm": 0.3955996036529541, "learning_rate": 2.987833333333333e-05, "loss": 0.0108, "step": 10368 }, { "epoch": 19.131117266851337, "grad_norm": 0.1432962566614151, "learning_rate": 2.9878e-05, "loss": 0.0078, "step": 10369 }, { "epoch": 19.132963988919666, "grad_norm": 0.2589716911315918, "learning_rate": 2.987766666666667e-05, "loss": 0.0146, "step": 10370 }, { "epoch": 19.134810710987995, "grad_norm": 0.22195583581924438, "learning_rate": 2.9877333333333333e-05, "loss": 0.0093, "step": 10371 }, { "epoch": 19.136657433056325, "grad_norm": 0.1908663958311081, "learning_rate": 2.9877000000000002e-05, "loss": 
0.0044, "step": 10372 }, { "epoch": 19.138504155124654, "grad_norm": 0.154612734913826, "learning_rate": 2.9876666666666668e-05, "loss": 0.0076, "step": 10373 }, { "epoch": 19.140350877192983, "grad_norm": 0.4131031334400177, "learning_rate": 2.9876333333333334e-05, "loss": 0.0117, "step": 10374 }, { "epoch": 19.142197599261312, "grad_norm": 0.3654535412788391, "learning_rate": 2.9876e-05, "loss": 0.0149, "step": 10375 }, { "epoch": 19.14404432132964, "grad_norm": 0.24689264595508575, "learning_rate": 2.987566666666667e-05, "loss": 0.0095, "step": 10376 }, { "epoch": 19.14589104339797, "grad_norm": 0.24347740411758423, "learning_rate": 2.9875333333333332e-05, "loss": 0.0082, "step": 10377 }, { "epoch": 19.147737765466296, "grad_norm": 0.531166672706604, "learning_rate": 2.9875e-05, "loss": 0.0578, "step": 10378 }, { "epoch": 19.149584487534625, "grad_norm": 0.4409041404724121, "learning_rate": 2.987466666666667e-05, "loss": 0.0162, "step": 10379 }, { "epoch": 19.151431209602954, "grad_norm": 0.35764533281326294, "learning_rate": 2.9874333333333333e-05, "loss": 0.0084, "step": 10380 }, { "epoch": 19.153277931671283, "grad_norm": 0.4136661887168884, "learning_rate": 2.9874000000000002e-05, "loss": 0.0075, "step": 10381 }, { "epoch": 19.155124653739612, "grad_norm": 1.1931157112121582, "learning_rate": 2.9873666666666665e-05, "loss": 0.0154, "step": 10382 }, { "epoch": 19.15697137580794, "grad_norm": 0.1597888022661209, "learning_rate": 2.9873333333333334e-05, "loss": 0.0054, "step": 10383 }, { "epoch": 19.15881809787627, "grad_norm": 0.4281260669231415, "learning_rate": 2.9873e-05, "loss": 0.0405, "step": 10384 }, { "epoch": 19.1606648199446, "grad_norm": 0.4385799169540405, "learning_rate": 2.9872666666666666e-05, "loss": 0.0112, "step": 10385 }, { "epoch": 19.162511542012926, "grad_norm": 0.22523953020572662, "learning_rate": 2.9872333333333335e-05, "loss": 0.0085, "step": 10386 }, { "epoch": 19.164358264081255, "grad_norm": 0.36893364787101746, "learning_rate": 
2.9872e-05, "loss": 0.0204, "step": 10387 }, { "epoch": 19.166204986149584, "grad_norm": 0.7368447184562683, "learning_rate": 2.9871666666666667e-05, "loss": 0.0126, "step": 10388 }, { "epoch": 19.168051708217913, "grad_norm": 0.29873305559158325, "learning_rate": 2.9871333333333333e-05, "loss": 0.0117, "step": 10389 }, { "epoch": 19.169898430286242, "grad_norm": 0.4472907483577728, "learning_rate": 2.9871000000000003e-05, "loss": 0.009, "step": 10390 }, { "epoch": 19.17174515235457, "grad_norm": 0.9163946509361267, "learning_rate": 2.9870666666666665e-05, "loss": 0.0127, "step": 10391 }, { "epoch": 19.1735918744229, "grad_norm": 0.2877427935600281, "learning_rate": 2.9870333333333334e-05, "loss": 0.0112, "step": 10392 }, { "epoch": 19.17543859649123, "grad_norm": 0.5438757538795471, "learning_rate": 2.987e-05, "loss": 0.0092, "step": 10393 }, { "epoch": 19.177285318559555, "grad_norm": 0.34730347990989685, "learning_rate": 2.9869666666666666e-05, "loss": 0.0083, "step": 10394 }, { "epoch": 19.179132040627884, "grad_norm": 0.49923327565193176, "learning_rate": 2.9869333333333336e-05, "loss": 0.0143, "step": 10395 }, { "epoch": 19.180978762696213, "grad_norm": 0.19998763501644135, "learning_rate": 2.9869e-05, "loss": 0.0047, "step": 10396 }, { "epoch": 19.182825484764543, "grad_norm": 0.4820026457309723, "learning_rate": 2.9868666666666667e-05, "loss": 0.0136, "step": 10397 }, { "epoch": 19.18467220683287, "grad_norm": 0.5864433646202087, "learning_rate": 2.9868333333333333e-05, "loss": 0.0157, "step": 10398 }, { "epoch": 19.1865189289012, "grad_norm": 0.5068093538284302, "learning_rate": 2.9868000000000003e-05, "loss": 0.1275, "step": 10399 }, { "epoch": 19.18836565096953, "grad_norm": 0.4917617440223694, "learning_rate": 2.9867666666666665e-05, "loss": 0.1254, "step": 10400 }, { "epoch": 19.19021237303786, "grad_norm": 0.8632218837738037, "learning_rate": 2.9867333333333335e-05, "loss": 0.1655, "step": 10401 }, { "epoch": 19.19205909510619, "grad_norm": 
0.4661218225955963, "learning_rate": 2.9867e-05, "loss": 0.0876, "step": 10402 }, { "epoch": 19.193905817174514, "grad_norm": 0.6275524497032166, "learning_rate": 2.9866666666666666e-05, "loss": 0.0873, "step": 10403 }, { "epoch": 19.195752539242843, "grad_norm": 0.4922047555446625, "learning_rate": 2.9866333333333336e-05, "loss": 0.0897, "step": 10404 }, { "epoch": 19.197599261311172, "grad_norm": 0.43131202459335327, "learning_rate": 2.9866000000000002e-05, "loss": 0.0591, "step": 10405 }, { "epoch": 19.1994459833795, "grad_norm": 0.536503255367279, "learning_rate": 2.9865666666666668e-05, "loss": 0.0911, "step": 10406 }, { "epoch": 19.20129270544783, "grad_norm": 0.3472905457019806, "learning_rate": 2.9865333333333334e-05, "loss": 0.0381, "step": 10407 }, { "epoch": 19.20313942751616, "grad_norm": 0.49963095784187317, "learning_rate": 2.9865000000000003e-05, "loss": 0.0509, "step": 10408 }, { "epoch": 19.20498614958449, "grad_norm": 0.48727062344551086, "learning_rate": 2.9864666666666665e-05, "loss": 0.0497, "step": 10409 }, { "epoch": 19.206832871652818, "grad_norm": 0.33055925369262695, "learning_rate": 2.9864333333333335e-05, "loss": 0.0492, "step": 10410 }, { "epoch": 19.208679593721143, "grad_norm": 0.4543503224849701, "learning_rate": 2.9864000000000004e-05, "loss": 0.0365, "step": 10411 }, { "epoch": 19.210526315789473, "grad_norm": 0.316006600856781, "learning_rate": 2.9863666666666667e-05, "loss": 0.0228, "step": 10412 }, { "epoch": 19.2123730378578, "grad_norm": 0.39846211671829224, "learning_rate": 2.9863333333333336e-05, "loss": 0.0144, "step": 10413 }, { "epoch": 19.21421975992613, "grad_norm": 0.32504454255104065, "learning_rate": 2.9863e-05, "loss": 0.0176, "step": 10414 }, { "epoch": 19.21606648199446, "grad_norm": 0.3825388252735138, "learning_rate": 2.9862666666666668e-05, "loss": 0.0137, "step": 10415 }, { "epoch": 19.21791320406279, "grad_norm": 0.276236355304718, "learning_rate": 2.9862333333333334e-05, "loss": 0.0174, "step": 10416 }, { 
"epoch": 19.21975992613112, "grad_norm": 0.31894567608833313, "learning_rate": 2.9862e-05, "loss": 0.0318, "step": 10417 }, { "epoch": 19.221606648199447, "grad_norm": 0.39254486560821533, "learning_rate": 2.9861666666666666e-05, "loss": 0.0322, "step": 10418 }, { "epoch": 19.223453370267773, "grad_norm": 0.3326973021030426, "learning_rate": 2.9861333333333335e-05, "loss": 0.0087, "step": 10419 }, { "epoch": 19.225300092336102, "grad_norm": 0.29619577527046204, "learning_rate": 2.9861e-05, "loss": 0.0089, "step": 10420 }, { "epoch": 19.22714681440443, "grad_norm": 0.3656005561351776, "learning_rate": 2.9860666666666667e-05, "loss": 0.0185, "step": 10421 }, { "epoch": 19.22899353647276, "grad_norm": 0.26014333963394165, "learning_rate": 2.9860333333333336e-05, "loss": 0.008, "step": 10422 }, { "epoch": 19.23084025854109, "grad_norm": 0.5052050948143005, "learning_rate": 2.986e-05, "loss": 0.0136, "step": 10423 }, { "epoch": 19.23268698060942, "grad_norm": 0.3282339572906494, "learning_rate": 2.9859666666666668e-05, "loss": 0.0141, "step": 10424 }, { "epoch": 19.234533702677748, "grad_norm": 0.33655408024787903, "learning_rate": 2.9859333333333334e-05, "loss": 0.0134, "step": 10425 }, { "epoch": 19.236380424746077, "grad_norm": 0.2944200336933136, "learning_rate": 2.9859e-05, "loss": 0.0103, "step": 10426 }, { "epoch": 19.238227146814406, "grad_norm": 0.49892765283584595, "learning_rate": 2.9858666666666666e-05, "loss": 0.0275, "step": 10427 }, { "epoch": 19.24007386888273, "grad_norm": 0.4131115972995758, "learning_rate": 2.9858333333333335e-05, "loss": 0.0096, "step": 10428 }, { "epoch": 19.24192059095106, "grad_norm": 0.2645561397075653, "learning_rate": 2.9858e-05, "loss": 0.0065, "step": 10429 }, { "epoch": 19.24376731301939, "grad_norm": 0.40666913986206055, "learning_rate": 2.9857666666666667e-05, "loss": 0.0111, "step": 10430 }, { "epoch": 19.24561403508772, "grad_norm": 0.22525639832019806, "learning_rate": 2.9857333333333336e-05, "loss": 0.0064, "step": 
10431 }, { "epoch": 19.24746075715605, "grad_norm": 0.400501549243927, "learning_rate": 2.9857e-05, "loss": 0.0097, "step": 10432 }, { "epoch": 19.249307479224377, "grad_norm": 0.28048983216285706, "learning_rate": 2.9856666666666668e-05, "loss": 0.0093, "step": 10433 }, { "epoch": 19.251154201292707, "grad_norm": 0.21725383400917053, "learning_rate": 2.9856333333333334e-05, "loss": 0.0069, "step": 10434 }, { "epoch": 19.253000923361036, "grad_norm": 0.294262558221817, "learning_rate": 2.9856e-05, "loss": 0.0098, "step": 10435 }, { "epoch": 19.25484764542936, "grad_norm": 0.43420305848121643, "learning_rate": 2.985566666666667e-05, "loss": 0.0189, "step": 10436 }, { "epoch": 19.25669436749769, "grad_norm": 0.19349953532218933, "learning_rate": 2.9855333333333335e-05, "loss": 0.0043, "step": 10437 }, { "epoch": 19.25854108956602, "grad_norm": 0.26871469616889954, "learning_rate": 2.9855e-05, "loss": 0.0068, "step": 10438 }, { "epoch": 19.26038781163435, "grad_norm": 0.18552646040916443, "learning_rate": 2.9854666666666667e-05, "loss": 0.005, "step": 10439 }, { "epoch": 19.262234533702678, "grad_norm": 0.2697330117225647, "learning_rate": 2.9854333333333337e-05, "loss": 0.0094, "step": 10440 }, { "epoch": 19.264081255771007, "grad_norm": 0.47541746497154236, "learning_rate": 2.9854e-05, "loss": 0.0094, "step": 10441 }, { "epoch": 19.265927977839336, "grad_norm": 0.2967643141746521, "learning_rate": 2.985366666666667e-05, "loss": 0.0071, "step": 10442 }, { "epoch": 19.267774699907665, "grad_norm": 0.3433935046195984, "learning_rate": 2.985333333333333e-05, "loss": 0.0118, "step": 10443 }, { "epoch": 19.26962142197599, "grad_norm": 0.2878125011920929, "learning_rate": 2.9853e-05, "loss": 0.0088, "step": 10444 }, { "epoch": 19.27146814404432, "grad_norm": 0.8284724354743958, "learning_rate": 2.985266666666667e-05, "loss": 0.0102, "step": 10445 }, { "epoch": 19.27331486611265, "grad_norm": 0.244842529296875, "learning_rate": 2.9852333333333332e-05, "loss": 0.0118, 
"step": 10446 }, { "epoch": 19.27516158818098, "grad_norm": 0.3617515563964844, "learning_rate": 2.9852e-05, "loss": 0.0093, "step": 10447 }, { "epoch": 19.277008310249307, "grad_norm": 0.1818387806415558, "learning_rate": 2.9851666666666667e-05, "loss": 0.0045, "step": 10448 }, { "epoch": 19.278855032317637, "grad_norm": 0.6636005640029907, "learning_rate": 2.9851333333333333e-05, "loss": 0.1463, "step": 10449 }, { "epoch": 19.280701754385966, "grad_norm": 0.501599133014679, "learning_rate": 2.9851e-05, "loss": 0.1471, "step": 10450 }, { "epoch": 19.282548476454295, "grad_norm": 0.46136799454689026, "learning_rate": 2.985066666666667e-05, "loss": 0.1049, "step": 10451 }, { "epoch": 19.284395198522624, "grad_norm": 0.5088426470756531, "learning_rate": 2.985033333333333e-05, "loss": 0.1087, "step": 10452 }, { "epoch": 19.28624192059095, "grad_norm": 0.5903878808021545, "learning_rate": 2.985e-05, "loss": 0.1165, "step": 10453 }, { "epoch": 19.28808864265928, "grad_norm": 0.45213332772254944, "learning_rate": 2.984966666666667e-05, "loss": 0.066, "step": 10454 }, { "epoch": 19.289935364727608, "grad_norm": 0.4082927703857422, "learning_rate": 2.9849333333333332e-05, "loss": 0.0713, "step": 10455 }, { "epoch": 19.291782086795937, "grad_norm": 0.4666750729084015, "learning_rate": 2.9849000000000002e-05, "loss": 0.06, "step": 10456 }, { "epoch": 19.293628808864266, "grad_norm": 0.6132091283798218, "learning_rate": 2.9848666666666668e-05, "loss": 0.0729, "step": 10457 }, { "epoch": 19.295475530932595, "grad_norm": 0.37567561864852905, "learning_rate": 2.9848333333333334e-05, "loss": 0.0659, "step": 10458 }, { "epoch": 19.297322253000925, "grad_norm": 0.30930113792419434, "learning_rate": 2.9848e-05, "loss": 0.0263, "step": 10459 }, { "epoch": 19.299168975069254, "grad_norm": 0.3227839171886444, "learning_rate": 2.984766666666667e-05, "loss": 0.0456, "step": 10460 }, { "epoch": 19.30101569713758, "grad_norm": 0.451276034116745, "learning_rate": 2.9847333333333335e-05, 
"loss": 0.0269, "step": 10461 }, { "epoch": 19.30286241920591, "grad_norm": 0.754874050617218, "learning_rate": 2.9847e-05, "loss": 0.0179, "step": 10462 }, { "epoch": 19.304709141274238, "grad_norm": 0.20438246428966522, "learning_rate": 2.984666666666667e-05, "loss": 0.0115, "step": 10463 }, { "epoch": 19.306555863342567, "grad_norm": 0.2762463390827179, "learning_rate": 2.9846333333333333e-05, "loss": 0.0283, "step": 10464 }, { "epoch": 19.308402585410896, "grad_norm": 0.34477874636650085, "learning_rate": 2.9846000000000002e-05, "loss": 0.0225, "step": 10465 }, { "epoch": 19.310249307479225, "grad_norm": 0.5555924773216248, "learning_rate": 2.9845666666666668e-05, "loss": 0.0574, "step": 10466 }, { "epoch": 19.312096029547554, "grad_norm": 0.2817211449146271, "learning_rate": 2.9845333333333334e-05, "loss": 0.039, "step": 10467 }, { "epoch": 19.313942751615883, "grad_norm": 0.3302725553512573, "learning_rate": 2.9845e-05, "loss": 0.0107, "step": 10468 }, { "epoch": 19.31578947368421, "grad_norm": 0.29507848620414734, "learning_rate": 2.984466666666667e-05, "loss": 0.0093, "step": 10469 }, { "epoch": 19.317636195752538, "grad_norm": 0.2905113399028778, "learning_rate": 2.9844333333333335e-05, "loss": 0.0118, "step": 10470 }, { "epoch": 19.319482917820867, "grad_norm": 0.7147642970085144, "learning_rate": 2.9844e-05, "loss": 0.0197, "step": 10471 }, { "epoch": 19.321329639889196, "grad_norm": 0.14920946955680847, "learning_rate": 2.9843666666666667e-05, "loss": 0.0055, "step": 10472 }, { "epoch": 19.323176361957525, "grad_norm": 0.30008870363235474, "learning_rate": 2.9843333333333333e-05, "loss": 0.011, "step": 10473 }, { "epoch": 19.325023084025855, "grad_norm": 0.27200573682785034, "learning_rate": 2.9843000000000002e-05, "loss": 0.0085, "step": 10474 }, { "epoch": 19.326869806094184, "grad_norm": 0.2959764301776886, "learning_rate": 2.9842666666666665e-05, "loss": 0.0062, "step": 10475 }, { "epoch": 19.328716528162513, "grad_norm": 0.5562206506729126, 
"learning_rate": 2.9842333333333334e-05, "loss": 0.0083, "step": 10476 }, { "epoch": 19.330563250230842, "grad_norm": 0.42132067680358887, "learning_rate": 2.9842e-05, "loss": 0.0168, "step": 10477 }, { "epoch": 19.332409972299168, "grad_norm": 0.7031576037406921, "learning_rate": 2.9841666666666666e-05, "loss": 0.0189, "step": 10478 }, { "epoch": 19.334256694367497, "grad_norm": 0.23381592333316803, "learning_rate": 2.9841333333333335e-05, "loss": 0.0073, "step": 10479 }, { "epoch": 19.336103416435826, "grad_norm": 0.50837641954422, "learning_rate": 2.9841e-05, "loss": 0.0149, "step": 10480 }, { "epoch": 19.337950138504155, "grad_norm": 0.47945070266723633, "learning_rate": 2.9840666666666667e-05, "loss": 0.0093, "step": 10481 }, { "epoch": 19.339796860572484, "grad_norm": 0.1993795931339264, "learning_rate": 2.9840333333333333e-05, "loss": 0.0091, "step": 10482 }, { "epoch": 19.341643582640813, "grad_norm": 0.25505030155181885, "learning_rate": 2.9840000000000002e-05, "loss": 0.0047, "step": 10483 }, { "epoch": 19.343490304709142, "grad_norm": 0.2103731483221054, "learning_rate": 2.9839666666666665e-05, "loss": 0.0061, "step": 10484 }, { "epoch": 19.34533702677747, "grad_norm": 0.3151203989982605, "learning_rate": 2.9839333333333334e-05, "loss": 0.0113, "step": 10485 }, { "epoch": 19.347183748845797, "grad_norm": 0.365931898355484, "learning_rate": 2.9839000000000003e-05, "loss": 0.0097, "step": 10486 }, { "epoch": 19.349030470914126, "grad_norm": 0.25502514839172363, "learning_rate": 2.9838666666666666e-05, "loss": 0.0106, "step": 10487 }, { "epoch": 19.350877192982455, "grad_norm": 0.25653916597366333, "learning_rate": 2.9838333333333335e-05, "loss": 0.007, "step": 10488 }, { "epoch": 19.352723915050785, "grad_norm": 0.441602498292923, "learning_rate": 2.9838e-05, "loss": 0.0204, "step": 10489 }, { "epoch": 19.354570637119114, "grad_norm": 0.12745323777198792, "learning_rate": 2.9837666666666667e-05, "loss": 0.0051, "step": 10490 }, { "epoch": 
19.356417359187443, "grad_norm": 0.3819082975387573, "learning_rate": 2.9837333333333333e-05, "loss": 0.0084, "step": 10491 }, { "epoch": 19.358264081255772, "grad_norm": 0.4130721688270569, "learning_rate": 2.9837000000000002e-05, "loss": 0.0104, "step": 10492 }, { "epoch": 19.3601108033241, "grad_norm": 0.2628061771392822, "learning_rate": 2.9836666666666665e-05, "loss": 0.0093, "step": 10493 }, { "epoch": 19.361957525392427, "grad_norm": 0.23770292103290558, "learning_rate": 2.9836333333333334e-05, "loss": 0.0052, "step": 10494 }, { "epoch": 19.363804247460756, "grad_norm": 0.4211592376232147, "learning_rate": 2.9836000000000004e-05, "loss": 0.0252, "step": 10495 }, { "epoch": 19.365650969529085, "grad_norm": 0.4491584002971649, "learning_rate": 2.9835666666666666e-05, "loss": 0.0161, "step": 10496 }, { "epoch": 19.367497691597414, "grad_norm": 0.38713496923446655, "learning_rate": 2.9835333333333336e-05, "loss": 0.0126, "step": 10497 }, { "epoch": 19.369344413665743, "grad_norm": 0.36408424377441406, "learning_rate": 2.9835e-05, "loss": 0.0093, "step": 10498 }, { "epoch": 19.371191135734072, "grad_norm": 0.767509937286377, "learning_rate": 2.9834666666666667e-05, "loss": 0.1899, "step": 10499 }, { "epoch": 19.3730378578024, "grad_norm": 0.5073058009147644, "learning_rate": 2.9834333333333333e-05, "loss": 0.1397, "step": 10500 }, { "epoch": 19.37488457987073, "grad_norm": 0.5196423530578613, "learning_rate": 2.9834000000000003e-05, "loss": 0.0866, "step": 10501 }, { "epoch": 19.37673130193906, "grad_norm": 0.4803215265274048, "learning_rate": 2.9833666666666665e-05, "loss": 0.0727, "step": 10502 }, { "epoch": 19.378578024007385, "grad_norm": 0.9240314364433289, "learning_rate": 2.9833333333333335e-05, "loss": 0.1469, "step": 10503 }, { "epoch": 19.380424746075715, "grad_norm": 0.3998948633670807, "learning_rate": 2.9833e-05, "loss": 0.0637, "step": 10504 }, { "epoch": 19.382271468144044, "grad_norm": 0.5939068794250488, "learning_rate": 2.9832666666666666e-05, 
"loss": 0.0628, "step": 10505 }, { "epoch": 19.384118190212373, "grad_norm": 0.38086938858032227, "learning_rate": 2.9832333333333336e-05, "loss": 0.066, "step": 10506 }, { "epoch": 19.385964912280702, "grad_norm": 0.41734108328819275, "learning_rate": 2.9831999999999998e-05, "loss": 0.0603, "step": 10507 }, { "epoch": 19.38781163434903, "grad_norm": 0.49323397874832153, "learning_rate": 2.9831666666666668e-05, "loss": 0.0567, "step": 10508 }, { "epoch": 19.38965835641736, "grad_norm": 0.3245604932308197, "learning_rate": 2.9831333333333334e-05, "loss": 0.0372, "step": 10509 }, { "epoch": 19.39150507848569, "grad_norm": 0.24436530470848083, "learning_rate": 2.9831e-05, "loss": 0.0352, "step": 10510 }, { "epoch": 19.393351800554015, "grad_norm": 0.35334378480911255, "learning_rate": 2.983066666666667e-05, "loss": 0.0754, "step": 10511 }, { "epoch": 19.395198522622344, "grad_norm": 0.38580289483070374, "learning_rate": 2.9830333333333335e-05, "loss": 0.0679, "step": 10512 }, { "epoch": 19.397045244690673, "grad_norm": 0.31692564487457275, "learning_rate": 2.983e-05, "loss": 0.0186, "step": 10513 }, { "epoch": 19.398891966759003, "grad_norm": 0.31757763028144836, "learning_rate": 2.9829666666666667e-05, "loss": 0.0256, "step": 10514 }, { "epoch": 19.40073868882733, "grad_norm": 0.16572915017604828, "learning_rate": 2.9829333333333336e-05, "loss": 0.011, "step": 10515 }, { "epoch": 19.40258541089566, "grad_norm": 0.4005686342716217, "learning_rate": 2.9829e-05, "loss": 0.017, "step": 10516 }, { "epoch": 19.40443213296399, "grad_norm": 0.33007410168647766, "learning_rate": 2.9828666666666668e-05, "loss": 0.0127, "step": 10517 }, { "epoch": 19.40627885503232, "grad_norm": 0.09764605760574341, "learning_rate": 2.9828333333333334e-05, "loss": 0.0049, "step": 10518 }, { "epoch": 19.408125577100645, "grad_norm": 0.29652926325798035, "learning_rate": 2.9828e-05, "loss": 0.0177, "step": 10519 }, { "epoch": 19.409972299168974, "grad_norm": 0.27024373412132263, "learning_rate": 
2.982766666666667e-05, "loss": 0.0134, "step": 10520 }, { "epoch": 19.411819021237303, "grad_norm": 0.18943774700164795, "learning_rate": 2.9827333333333335e-05, "loss": 0.0157, "step": 10521 }, { "epoch": 19.413665743305632, "grad_norm": 0.11868040263652802, "learning_rate": 2.9827e-05, "loss": 0.0042, "step": 10522 }, { "epoch": 19.41551246537396, "grad_norm": 0.611986517906189, "learning_rate": 2.9826666666666667e-05, "loss": 0.0121, "step": 10523 }, { "epoch": 19.41735918744229, "grad_norm": 0.3093113899230957, "learning_rate": 2.9826333333333336e-05, "loss": 0.0242, "step": 10524 }, { "epoch": 19.41920590951062, "grad_norm": 0.7205303311347961, "learning_rate": 2.9826e-05, "loss": 0.0176, "step": 10525 }, { "epoch": 19.42105263157895, "grad_norm": 0.36200499534606934, "learning_rate": 2.9825666666666668e-05, "loss": 0.0125, "step": 10526 }, { "epoch": 19.422899353647278, "grad_norm": 0.2035263627767563, "learning_rate": 2.9825333333333334e-05, "loss": 0.0099, "step": 10527 }, { "epoch": 19.424746075715603, "grad_norm": 0.27171552181243896, "learning_rate": 2.9825e-05, "loss": 0.0079, "step": 10528 }, { "epoch": 19.426592797783933, "grad_norm": 0.3976079523563385, "learning_rate": 2.982466666666667e-05, "loss": 0.0458, "step": 10529 }, { "epoch": 19.42843951985226, "grad_norm": 0.19442453980445862, "learning_rate": 2.9824333333333335e-05, "loss": 0.0085, "step": 10530 }, { "epoch": 19.43028624192059, "grad_norm": 0.30339083075523376, "learning_rate": 2.9824e-05, "loss": 0.0115, "step": 10531 }, { "epoch": 19.43213296398892, "grad_norm": 1.2128410339355469, "learning_rate": 2.9823666666666667e-05, "loss": 0.0042, "step": 10532 }, { "epoch": 19.43397968605725, "grad_norm": 0.8668030500411987, "learning_rate": 2.9823333333333333e-05, "loss": 0.0131, "step": 10533 }, { "epoch": 19.43582640812558, "grad_norm": 0.1962207853794098, "learning_rate": 2.9823e-05, "loss": 0.0043, "step": 10534 }, { "epoch": 19.437673130193907, "grad_norm": 0.21301503479480743, 
"learning_rate": 2.9822666666666668e-05, "loss": 0.0068, "step": 10535 }, { "epoch": 19.439519852262233, "grad_norm": 0.7212603092193604, "learning_rate": 2.9822333333333334e-05, "loss": 0.0152, "step": 10536 }, { "epoch": 19.441366574330562, "grad_norm": 0.4302648901939392, "learning_rate": 2.9822e-05, "loss": 0.0095, "step": 10537 }, { "epoch": 19.44321329639889, "grad_norm": 0.2859456539154053, "learning_rate": 2.982166666666667e-05, "loss": 0.0051, "step": 10538 }, { "epoch": 19.44506001846722, "grad_norm": 0.2743951976299286, "learning_rate": 2.9821333333333332e-05, "loss": 0.0076, "step": 10539 }, { "epoch": 19.44690674053555, "grad_norm": 0.7187720537185669, "learning_rate": 2.9821e-05, "loss": 0.0136, "step": 10540 }, { "epoch": 19.44875346260388, "grad_norm": 0.4372936487197876, "learning_rate": 2.9820666666666667e-05, "loss": 0.0122, "step": 10541 }, { "epoch": 19.450600184672208, "grad_norm": 0.5092134475708008, "learning_rate": 2.9820333333333333e-05, "loss": 0.0155, "step": 10542 }, { "epoch": 19.452446906740537, "grad_norm": 0.41750380396842957, "learning_rate": 2.982e-05, "loss": 0.0096, "step": 10543 }, { "epoch": 19.454293628808863, "grad_norm": 0.5092931985855103, "learning_rate": 2.981966666666667e-05, "loss": 0.0092, "step": 10544 }, { "epoch": 19.45614035087719, "grad_norm": 0.25309839844703674, "learning_rate": 2.9819333333333334e-05, "loss": 0.0116, "step": 10545 }, { "epoch": 19.45798707294552, "grad_norm": 0.6120051145553589, "learning_rate": 2.9819e-05, "loss": 0.0154, "step": 10546 }, { "epoch": 19.45983379501385, "grad_norm": 0.2491397112607956, "learning_rate": 2.981866666666667e-05, "loss": 0.0083, "step": 10547 }, { "epoch": 19.46168051708218, "grad_norm": 0.3732081353664398, "learning_rate": 2.9818333333333332e-05, "loss": 0.0104, "step": 10548 }, { "epoch": 19.46352723915051, "grad_norm": 0.5665434002876282, "learning_rate": 2.9818e-05, "loss": 0.1143, "step": 10549 }, { "epoch": 19.465373961218837, "grad_norm": 0.5090430974960327, 
"learning_rate": 2.9817666666666667e-05, "loss": 0.1119, "step": 10550 }, { "epoch": 19.467220683287167, "grad_norm": 0.7848197817802429, "learning_rate": 2.9817333333333333e-05, "loss": 0.164, "step": 10551 }, { "epoch": 19.469067405355496, "grad_norm": 0.6643295288085938, "learning_rate": 2.9817e-05, "loss": 0.1209, "step": 10552 }, { "epoch": 19.47091412742382, "grad_norm": 0.7260629534721375, "learning_rate": 2.981666666666667e-05, "loss": 0.1687, "step": 10553 }, { "epoch": 19.47276084949215, "grad_norm": 0.575410783290863, "learning_rate": 2.9816333333333335e-05, "loss": 0.0625, "step": 10554 }, { "epoch": 19.47460757156048, "grad_norm": 0.5349397659301758, "learning_rate": 2.9816e-05, "loss": 0.0714, "step": 10555 }, { "epoch": 19.47645429362881, "grad_norm": 0.39579540491104126, "learning_rate": 2.981566666666667e-05, "loss": 0.0591, "step": 10556 }, { "epoch": 19.478301015697138, "grad_norm": 0.47131359577178955, "learning_rate": 2.9815333333333332e-05, "loss": 0.0645, "step": 10557 }, { "epoch": 19.480147737765467, "grad_norm": 0.42419734597206116, "learning_rate": 2.9815e-05, "loss": 0.0878, "step": 10558 }, { "epoch": 19.481994459833796, "grad_norm": 0.5721330642700195, "learning_rate": 2.9814666666666668e-05, "loss": 0.0719, "step": 10559 }, { "epoch": 19.483841181902125, "grad_norm": 0.3947785794734955, "learning_rate": 2.9814333333333334e-05, "loss": 0.0369, "step": 10560 }, { "epoch": 19.48568790397045, "grad_norm": 0.4918561577796936, "learning_rate": 2.9814000000000003e-05, "loss": 0.0688, "step": 10561 }, { "epoch": 19.48753462603878, "grad_norm": 0.9153860807418823, "learning_rate": 2.981366666666667e-05, "loss": 0.0445, "step": 10562 }, { "epoch": 19.48938134810711, "grad_norm": 0.5050480961799622, "learning_rate": 2.9813333333333335e-05, "loss": 0.0213, "step": 10563 }, { "epoch": 19.49122807017544, "grad_norm": 0.28121042251586914, "learning_rate": 2.9813e-05, "loss": 0.0113, "step": 10564 }, { "epoch": 19.493074792243767, "grad_norm": 
0.5272451043128967, "learning_rate": 2.9812666666666667e-05, "loss": 0.0162, "step": 10565 }, { "epoch": 19.494921514312097, "grad_norm": 0.29827800393104553, "learning_rate": 2.9812333333333333e-05, "loss": 0.0086, "step": 10566 }, { "epoch": 19.496768236380426, "grad_norm": 0.19304965436458588, "learning_rate": 2.9812000000000002e-05, "loss": 0.0139, "step": 10567 }, { "epoch": 19.498614958448755, "grad_norm": 0.440072625875473, "learning_rate": 2.9811666666666664e-05, "loss": 0.0164, "step": 10568 }, { "epoch": 19.50046168051708, "grad_norm": 0.16277796030044556, "learning_rate": 2.9811333333333334e-05, "loss": 0.0073, "step": 10569 }, { "epoch": 19.50230840258541, "grad_norm": 0.22500938177108765, "learning_rate": 2.9811000000000003e-05, "loss": 0.0063, "step": 10570 }, { "epoch": 19.50415512465374, "grad_norm": 0.20329682528972626, "learning_rate": 2.9810666666666666e-05, "loss": 0.0084, "step": 10571 }, { "epoch": 19.506001846722068, "grad_norm": 0.5552082657814026, "learning_rate": 2.9810333333333335e-05, "loss": 0.0118, "step": 10572 }, { "epoch": 19.507848568790397, "grad_norm": 0.4801299273967743, "learning_rate": 2.981e-05, "loss": 0.0174, "step": 10573 }, { "epoch": 19.509695290858726, "grad_norm": 0.6265919208526611, "learning_rate": 2.9809666666666667e-05, "loss": 0.0325, "step": 10574 }, { "epoch": 19.511542012927055, "grad_norm": 0.24671681225299835, "learning_rate": 2.9809333333333333e-05, "loss": 0.0067, "step": 10575 }, { "epoch": 19.513388734995385, "grad_norm": 0.28970077633857727, "learning_rate": 2.9809000000000002e-05, "loss": 0.0059, "step": 10576 }, { "epoch": 19.51523545706371, "grad_norm": 0.24169009923934937, "learning_rate": 2.9808666666666665e-05, "loss": 0.0088, "step": 10577 }, { "epoch": 19.51708217913204, "grad_norm": 0.2239508479833603, "learning_rate": 2.9808333333333334e-05, "loss": 0.0058, "step": 10578 }, { "epoch": 19.51892890120037, "grad_norm": 0.3484097719192505, "learning_rate": 2.9808000000000003e-05, "loss": 0.0085, 
"step": 10579 }, { "epoch": 19.520775623268698, "grad_norm": 0.35717564821243286, "learning_rate": 2.9807666666666666e-05, "loss": 0.0077, "step": 10580 }, { "epoch": 19.522622345337027, "grad_norm": 0.30840328335762024, "learning_rate": 2.9807333333333335e-05, "loss": 0.0074, "step": 10581 }, { "epoch": 19.524469067405356, "grad_norm": 0.3130101263523102, "learning_rate": 2.9807e-05, "loss": 0.0137, "step": 10582 }, { "epoch": 19.526315789473685, "grad_norm": 0.3575838506221771, "learning_rate": 2.9806666666666667e-05, "loss": 0.0114, "step": 10583 }, { "epoch": 19.528162511542014, "grad_norm": 0.2482454925775528, "learning_rate": 2.9806333333333333e-05, "loss": 0.0052, "step": 10584 }, { "epoch": 19.530009233610343, "grad_norm": 0.4640428125858307, "learning_rate": 2.9806000000000002e-05, "loss": 0.0395, "step": 10585 }, { "epoch": 19.53185595567867, "grad_norm": 0.5307456851005554, "learning_rate": 2.9805666666666668e-05, "loss": 0.0148, "step": 10586 }, { "epoch": 19.533702677746998, "grad_norm": 0.583564281463623, "learning_rate": 2.9805333333333334e-05, "loss": 0.0129, "step": 10587 }, { "epoch": 19.535549399815327, "grad_norm": 0.4926765561103821, "learning_rate": 2.9805000000000003e-05, "loss": 0.0093, "step": 10588 }, { "epoch": 19.537396121883656, "grad_norm": 0.5021626949310303, "learning_rate": 2.9804666666666666e-05, "loss": 0.0101, "step": 10589 }, { "epoch": 19.539242843951985, "grad_norm": 0.36017680168151855, "learning_rate": 2.9804333333333335e-05, "loss": 0.0085, "step": 10590 }, { "epoch": 19.541089566020315, "grad_norm": 0.6824921369552612, "learning_rate": 2.9804e-05, "loss": 0.0115, "step": 10591 }, { "epoch": 19.542936288088644, "grad_norm": 0.5039044618606567, "learning_rate": 2.9803666666666667e-05, "loss": 0.014, "step": 10592 }, { "epoch": 19.544783010156973, "grad_norm": 0.6053094267845154, "learning_rate": 2.9803333333333333e-05, "loss": 0.0114, "step": 10593 }, { "epoch": 19.5466297322253, "grad_norm": 0.48566383123397827, 
"learning_rate": 2.9803e-05, "loss": 0.0129, "step": 10594 }, { "epoch": 19.548476454293628, "grad_norm": 0.609473705291748, "learning_rate": 2.980266666666667e-05, "loss": 0.0178, "step": 10595 }, { "epoch": 19.550323176361957, "grad_norm": 0.6189708113670349, "learning_rate": 2.9802333333333334e-05, "loss": 0.0345, "step": 10596 }, { "epoch": 19.552169898430286, "grad_norm": 0.3618892729282379, "learning_rate": 2.9802e-05, "loss": 0.0121, "step": 10597 }, { "epoch": 19.554016620498615, "grad_norm": 0.42415696382522583, "learning_rate": 2.9801666666666666e-05, "loss": 0.0184, "step": 10598 }, { "epoch": 19.555863342566944, "grad_norm": 0.5935088992118835, "learning_rate": 2.9801333333333336e-05, "loss": 0.1555, "step": 10599 }, { "epoch": 19.557710064635273, "grad_norm": 0.6377758979797363, "learning_rate": 2.9800999999999998e-05, "loss": 0.1726, "step": 10600 }, { "epoch": 19.559556786703602, "grad_norm": 0.6348208785057068, "learning_rate": 2.9800666666666667e-05, "loss": 0.1333, "step": 10601 }, { "epoch": 19.56140350877193, "grad_norm": 0.4387953579425812, "learning_rate": 2.9800333333333333e-05, "loss": 0.0984, "step": 10602 }, { "epoch": 19.563250230840257, "grad_norm": 0.3931863307952881, "learning_rate": 2.98e-05, "loss": 0.078, "step": 10603 }, { "epoch": 19.565096952908586, "grad_norm": 0.4532827138900757, "learning_rate": 2.979966666666667e-05, "loss": 0.0909, "step": 10604 }, { "epoch": 19.566943674976915, "grad_norm": 0.4221654236316681, "learning_rate": 2.9799333333333335e-05, "loss": 0.0949, "step": 10605 }, { "epoch": 19.568790397045245, "grad_norm": 0.4770489037036896, "learning_rate": 2.9799e-05, "loss": 0.0448, "step": 10606 }, { "epoch": 19.570637119113574, "grad_norm": 0.4773814380168915, "learning_rate": 2.9798666666666666e-05, "loss": 0.0767, "step": 10607 }, { "epoch": 19.572483841181903, "grad_norm": 0.504423975944519, "learning_rate": 2.9798333333333336e-05, "loss": 0.0507, "step": 10608 }, { "epoch": 19.574330563250232, "grad_norm": 
0.6374596953392029, "learning_rate": 2.9797999999999998e-05, "loss": 0.0608, "step": 10609 }, { "epoch": 19.57617728531856, "grad_norm": 0.4656340479850769, "learning_rate": 2.9797666666666668e-05, "loss": 0.0768, "step": 10610 }, { "epoch": 19.578024007386887, "grad_norm": 0.8195959329605103, "learning_rate": 2.9797333333333337e-05, "loss": 0.0287, "step": 10611 }, { "epoch": 19.579870729455216, "grad_norm": 0.2056460976600647, "learning_rate": 2.9797e-05, "loss": 0.0167, "step": 10612 }, { "epoch": 19.581717451523545, "grad_norm": 0.33398884534835815, "learning_rate": 2.979666666666667e-05, "loss": 0.0303, "step": 10613 }, { "epoch": 19.583564173591874, "grad_norm": 0.37782248854637146, "learning_rate": 2.9796333333333335e-05, "loss": 0.0335, "step": 10614 }, { "epoch": 19.585410895660203, "grad_norm": 0.2887199819087982, "learning_rate": 2.9796e-05, "loss": 0.0159, "step": 10615 }, { "epoch": 19.587257617728532, "grad_norm": 0.3071005046367645, "learning_rate": 2.9795666666666667e-05, "loss": 0.0184, "step": 10616 }, { "epoch": 19.58910433979686, "grad_norm": 0.3234884738922119, "learning_rate": 2.9795333333333336e-05, "loss": 0.0149, "step": 10617 }, { "epoch": 19.59095106186519, "grad_norm": 0.12488967925310135, "learning_rate": 2.9795e-05, "loss": 0.0044, "step": 10618 }, { "epoch": 19.592797783933516, "grad_norm": 0.2693093717098236, "learning_rate": 2.9794666666666668e-05, "loss": 0.0404, "step": 10619 }, { "epoch": 19.594644506001845, "grad_norm": 0.3381086587905884, "learning_rate": 2.9794333333333337e-05, "loss": 0.0104, "step": 10620 }, { "epoch": 19.596491228070175, "grad_norm": 0.5766419172286987, "learning_rate": 2.9794e-05, "loss": 0.0143, "step": 10621 }, { "epoch": 19.598337950138504, "grad_norm": 0.15342627465724945, "learning_rate": 2.979366666666667e-05, "loss": 0.0057, "step": 10622 }, { "epoch": 19.600184672206833, "grad_norm": 0.5277054309844971, "learning_rate": 2.979333333333333e-05, "loss": 0.0134, "step": 10623 }, { "epoch": 
19.602031394275162, "grad_norm": 0.19860132038593292, "learning_rate": 2.9793e-05, "loss": 0.0079, "step": 10624 }, { "epoch": 19.60387811634349, "grad_norm": 0.4835110008716583, "learning_rate": 2.9792666666666667e-05, "loss": 0.0112, "step": 10625 }, { "epoch": 19.60572483841182, "grad_norm": 0.26355403661727905, "learning_rate": 2.9792333333333333e-05, "loss": 0.0431, "step": 10626 }, { "epoch": 19.607571560480146, "grad_norm": 0.4434733986854553, "learning_rate": 2.9792e-05, "loss": 0.0167, "step": 10627 }, { "epoch": 19.609418282548475, "grad_norm": 0.5944486260414124, "learning_rate": 2.9791666666666668e-05, "loss": 0.02, "step": 10628 }, { "epoch": 19.611265004616804, "grad_norm": 0.49193495512008667, "learning_rate": 2.9791333333333334e-05, "loss": 0.0188, "step": 10629 }, { "epoch": 19.613111726685133, "grad_norm": 0.5033032298088074, "learning_rate": 2.9791e-05, "loss": 0.0112, "step": 10630 }, { "epoch": 19.614958448753463, "grad_norm": 0.4277038872241974, "learning_rate": 2.979066666666667e-05, "loss": 0.0142, "step": 10631 }, { "epoch": 19.61680517082179, "grad_norm": 0.20894268155097961, "learning_rate": 2.9790333333333332e-05, "loss": 0.0051, "step": 10632 }, { "epoch": 19.61865189289012, "grad_norm": 0.22185315191745758, "learning_rate": 2.979e-05, "loss": 0.0087, "step": 10633 }, { "epoch": 19.62049861495845, "grad_norm": 0.26102548837661743, "learning_rate": 2.9789666666666667e-05, "loss": 0.0075, "step": 10634 }, { "epoch": 19.62234533702678, "grad_norm": 0.5750803351402283, "learning_rate": 2.9789333333333333e-05, "loss": 0.0062, "step": 10635 }, { "epoch": 19.624192059095105, "grad_norm": 0.19431185722351074, "learning_rate": 2.9789000000000002e-05, "loss": 0.0078, "step": 10636 }, { "epoch": 19.626038781163434, "grad_norm": 1.0597251653671265, "learning_rate": 2.9788666666666668e-05, "loss": 0.0333, "step": 10637 }, { "epoch": 19.627885503231763, "grad_norm": 0.5158454775810242, "learning_rate": 2.9788333333333334e-05, "loss": 0.0161, "step": 
10638 }, { "epoch": 19.629732225300092, "grad_norm": 0.33750757575035095, "learning_rate": 2.9788e-05, "loss": 0.0082, "step": 10639 }, { "epoch": 19.63157894736842, "grad_norm": 0.5329627990722656, "learning_rate": 2.978766666666667e-05, "loss": 0.0166, "step": 10640 }, { "epoch": 19.63342566943675, "grad_norm": 0.7116667628288269, "learning_rate": 2.9787333333333332e-05, "loss": 0.0176, "step": 10641 }, { "epoch": 19.63527239150508, "grad_norm": 0.2800839841365814, "learning_rate": 2.9787e-05, "loss": 0.0074, "step": 10642 }, { "epoch": 19.63711911357341, "grad_norm": 0.556308925151825, "learning_rate": 2.9786666666666667e-05, "loss": 0.0205, "step": 10643 }, { "epoch": 19.638965835641734, "grad_norm": 0.8026602864265442, "learning_rate": 2.9786333333333333e-05, "loss": 0.0147, "step": 10644 }, { "epoch": 19.640812557710063, "grad_norm": 0.4995995759963989, "learning_rate": 2.9786000000000002e-05, "loss": 0.0106, "step": 10645 }, { "epoch": 19.642659279778393, "grad_norm": 0.556323230266571, "learning_rate": 2.978566666666667e-05, "loss": 0.0162, "step": 10646 }, { "epoch": 19.64450600184672, "grad_norm": 0.5479616522789001, "learning_rate": 2.9785333333333334e-05, "loss": 0.0109, "step": 10647 }, { "epoch": 19.64635272391505, "grad_norm": 0.9250813126564026, "learning_rate": 2.9785e-05, "loss": 0.0098, "step": 10648 }, { "epoch": 19.64819944598338, "grad_norm": 1.283340334892273, "learning_rate": 2.978466666666667e-05, "loss": 0.2004, "step": 10649 }, { "epoch": 19.65004616805171, "grad_norm": 0.6739901900291443, "learning_rate": 2.9784333333333332e-05, "loss": 0.217, "step": 10650 }, { "epoch": 19.65189289012004, "grad_norm": 0.5376523733139038, "learning_rate": 2.9784e-05, "loss": 0.1042, "step": 10651 }, { "epoch": 19.653739612188367, "grad_norm": 0.7716366648674011, "learning_rate": 2.9783666666666667e-05, "loss": 0.1006, "step": 10652 }, { "epoch": 19.655586334256693, "grad_norm": 0.4473935663700104, "learning_rate": 2.9783333333333333e-05, "loss": 0.0795, 
"step": 10653 }, { "epoch": 19.657433056325022, "grad_norm": 0.47197431325912476, "learning_rate": 2.9783000000000003e-05, "loss": 0.0708, "step": 10654 }, { "epoch": 19.65927977839335, "grad_norm": 0.530225396156311, "learning_rate": 2.9782666666666665e-05, "loss": 0.0684, "step": 10655 }, { "epoch": 19.66112650046168, "grad_norm": 0.3782411813735962, "learning_rate": 2.9782333333333335e-05, "loss": 0.0747, "step": 10656 }, { "epoch": 19.66297322253001, "grad_norm": 0.5400944352149963, "learning_rate": 2.9782e-05, "loss": 0.0515, "step": 10657 }, { "epoch": 19.66481994459834, "grad_norm": 0.6011316776275635, "learning_rate": 2.9781666666666666e-05, "loss": 0.0601, "step": 10658 }, { "epoch": 19.666666666666668, "grad_norm": 0.38033759593963623, "learning_rate": 2.9781333333333332e-05, "loss": 0.037, "step": 10659 }, { "epoch": 19.668513388734997, "grad_norm": 0.3871672451496124, "learning_rate": 2.9781e-05, "loss": 0.0573, "step": 10660 }, { "epoch": 19.670360110803323, "grad_norm": 0.35777178406715393, "learning_rate": 2.9780666666666668e-05, "loss": 0.048, "step": 10661 }, { "epoch": 19.67220683287165, "grad_norm": 0.4704509675502777, "learning_rate": 2.9780333333333334e-05, "loss": 0.0589, "step": 10662 }, { "epoch": 19.67405355493998, "grad_norm": 2.055455207824707, "learning_rate": 2.9780000000000003e-05, "loss": 0.078, "step": 10663 }, { "epoch": 19.67590027700831, "grad_norm": 0.25962790846824646, "learning_rate": 2.9779666666666665e-05, "loss": 0.0214, "step": 10664 }, { "epoch": 19.67774699907664, "grad_norm": 0.2785275876522064, "learning_rate": 2.9779333333333335e-05, "loss": 0.0127, "step": 10665 }, { "epoch": 19.67959372114497, "grad_norm": 0.37023863196372986, "learning_rate": 2.9779e-05, "loss": 0.0133, "step": 10666 }, { "epoch": 19.681440443213297, "grad_norm": 0.40001484751701355, "learning_rate": 2.9778666666666667e-05, "loss": 0.0324, "step": 10667 }, { "epoch": 19.683287165281627, "grad_norm": 0.26877978444099426, "learning_rate": 
2.9778333333333333e-05, "loss": 0.0163, "step": 10668 }, { "epoch": 19.685133887349952, "grad_norm": 0.33660438656806946, "learning_rate": 2.9778000000000002e-05, "loss": 0.0141, "step": 10669 }, { "epoch": 19.68698060941828, "grad_norm": 0.3006255328655243, "learning_rate": 2.9777666666666668e-05, "loss": 0.0151, "step": 10670 }, { "epoch": 19.68882733148661, "grad_norm": 0.6988603472709656, "learning_rate": 2.9777333333333334e-05, "loss": 0.0268, "step": 10671 }, { "epoch": 19.69067405355494, "grad_norm": 0.4372357428073883, "learning_rate": 2.9777000000000003e-05, "loss": 0.009, "step": 10672 }, { "epoch": 19.69252077562327, "grad_norm": 0.9028867483139038, "learning_rate": 2.9776666666666666e-05, "loss": 0.0234, "step": 10673 }, { "epoch": 19.694367497691598, "grad_norm": 0.6994834542274475, "learning_rate": 2.9776333333333335e-05, "loss": 0.0181, "step": 10674 }, { "epoch": 19.696214219759927, "grad_norm": 1.2151726484298706, "learning_rate": 2.9776e-05, "loss": 0.0125, "step": 10675 }, { "epoch": 19.698060941828256, "grad_norm": 0.18534334003925323, "learning_rate": 2.9775666666666667e-05, "loss": 0.0095, "step": 10676 }, { "epoch": 19.69990766389658, "grad_norm": 0.21873116493225098, "learning_rate": 2.9775333333333333e-05, "loss": 0.0119, "step": 10677 }, { "epoch": 19.70175438596491, "grad_norm": 0.40969711542129517, "learning_rate": 2.9775000000000002e-05, "loss": 0.0103, "step": 10678 }, { "epoch": 19.70360110803324, "grad_norm": 0.5559830069541931, "learning_rate": 2.9774666666666668e-05, "loss": 0.0238, "step": 10679 }, { "epoch": 19.70544783010157, "grad_norm": 0.5412967205047607, "learning_rate": 2.9774333333333334e-05, "loss": 0.0146, "step": 10680 }, { "epoch": 19.7072945521699, "grad_norm": 0.12473435699939728, "learning_rate": 2.9774000000000003e-05, "loss": 0.0044, "step": 10681 }, { "epoch": 19.709141274238227, "grad_norm": 0.586225688457489, "learning_rate": 2.9773666666666666e-05, "loss": 0.0134, "step": 10682 }, { "epoch": 
19.710987996306557, "grad_norm": 0.44378283619880676, "learning_rate": 2.9773333333333335e-05, "loss": 0.0173, "step": 10683 }, { "epoch": 19.712834718374886, "grad_norm": 0.5063177943229675, "learning_rate": 2.9772999999999998e-05, "loss": 0.0213, "step": 10684 }, { "epoch": 19.714681440443215, "grad_norm": 0.5654175281524658, "learning_rate": 2.9772666666666667e-05, "loss": 0.0101, "step": 10685 }, { "epoch": 19.71652816251154, "grad_norm": 0.222188800573349, "learning_rate": 2.9772333333333336e-05, "loss": 0.007, "step": 10686 }, { "epoch": 19.71837488457987, "grad_norm": 0.448893666267395, "learning_rate": 2.9772e-05, "loss": 0.0122, "step": 10687 }, { "epoch": 19.7202216066482, "grad_norm": 1.0693613290786743, "learning_rate": 2.9771666666666668e-05, "loss": 0.0169, "step": 10688 }, { "epoch": 19.722068328716528, "grad_norm": 0.35767197608947754, "learning_rate": 2.9771333333333334e-05, "loss": 0.0113, "step": 10689 }, { "epoch": 19.723915050784857, "grad_norm": 0.8080740571022034, "learning_rate": 2.9771e-05, "loss": 0.023, "step": 10690 }, { "epoch": 19.725761772853186, "grad_norm": 0.4295150339603424, "learning_rate": 2.9770666666666666e-05, "loss": 0.0161, "step": 10691 }, { "epoch": 19.727608494921515, "grad_norm": 0.3795710802078247, "learning_rate": 2.9770333333333335e-05, "loss": 0.0109, "step": 10692 }, { "epoch": 19.729455216989845, "grad_norm": 0.45034053921699524, "learning_rate": 2.9769999999999998e-05, "loss": 0.0193, "step": 10693 }, { "epoch": 19.73130193905817, "grad_norm": 0.4286927878856659, "learning_rate": 2.9769666666666667e-05, "loss": 0.0115, "step": 10694 }, { "epoch": 19.7331486611265, "grad_norm": 0.556318461894989, "learning_rate": 2.9769333333333337e-05, "loss": 0.0162, "step": 10695 }, { "epoch": 19.73499538319483, "grad_norm": 0.20577329397201538, "learning_rate": 2.9769e-05, "loss": 0.0077, "step": 10696 }, { "epoch": 19.736842105263158, "grad_norm": 0.16284756362438202, "learning_rate": 2.976866666666667e-05, "loss": 0.0067, 
"step": 10697 }, { "epoch": 19.738688827331487, "grad_norm": 1.3887354135513306, "learning_rate": 2.9768333333333334e-05, "loss": 0.0343, "step": 10698 }, { "epoch": 19.740535549399816, "grad_norm": 0.5854371786117554, "learning_rate": 2.9768e-05, "loss": 0.1647, "step": 10699 }, { "epoch": 19.742382271468145, "grad_norm": 0.5120023488998413, "learning_rate": 2.9767666666666666e-05, "loss": 0.1378, "step": 10700 }, { "epoch": 19.744228993536474, "grad_norm": 0.46874797344207764, "learning_rate": 2.9767333333333336e-05, "loss": 0.1202, "step": 10701 }, { "epoch": 19.746075715604803, "grad_norm": 0.5347771644592285, "learning_rate": 2.9766999999999998e-05, "loss": 0.1216, "step": 10702 }, { "epoch": 19.74792243767313, "grad_norm": 0.5150458216667175, "learning_rate": 2.9766666666666667e-05, "loss": 0.0987, "step": 10703 }, { "epoch": 19.749769159741458, "grad_norm": 0.5133240818977356, "learning_rate": 2.9766333333333337e-05, "loss": 0.1145, "step": 10704 }, { "epoch": 19.751615881809787, "grad_norm": 0.4530353844165802, "learning_rate": 2.9766e-05, "loss": 0.068, "step": 10705 }, { "epoch": 19.753462603878116, "grad_norm": 0.3664388656616211, "learning_rate": 2.976566666666667e-05, "loss": 0.0582, "step": 10706 }, { "epoch": 19.755309325946445, "grad_norm": 0.6541823148727417, "learning_rate": 2.9765333333333335e-05, "loss": 0.0758, "step": 10707 }, { "epoch": 19.757156048014775, "grad_norm": 0.3894199728965759, "learning_rate": 2.9765e-05, "loss": 0.0526, "step": 10708 }, { "epoch": 19.759002770083104, "grad_norm": 0.6326406002044678, "learning_rate": 2.9764666666666666e-05, "loss": 0.0544, "step": 10709 }, { "epoch": 19.760849492151433, "grad_norm": 0.5390200614929199, "learning_rate": 2.9764333333333336e-05, "loss": 0.04, "step": 10710 }, { "epoch": 19.76269621421976, "grad_norm": 0.2868020534515381, "learning_rate": 2.9764e-05, "loss": 0.0276, "step": 10711 }, { "epoch": 19.764542936288088, "grad_norm": 0.364552766084671, "learning_rate": 2.9763666666666668e-05, 
"loss": 0.0221, "step": 10712 }, { "epoch": 19.766389658356417, "grad_norm": 0.4122895300388336, "learning_rate": 2.9763333333333337e-05, "loss": 0.0272, "step": 10713 }, { "epoch": 19.768236380424746, "grad_norm": 0.3642270863056183, "learning_rate": 2.9763e-05, "loss": 0.0507, "step": 10714 }, { "epoch": 19.770083102493075, "grad_norm": 0.30101460218429565, "learning_rate": 2.976266666666667e-05, "loss": 0.0406, "step": 10715 }, { "epoch": 19.771929824561404, "grad_norm": 0.27228695154190063, "learning_rate": 2.976233333333333e-05, "loss": 0.0136, "step": 10716 }, { "epoch": 19.773776546629733, "grad_norm": 0.3003411293029785, "learning_rate": 2.9762e-05, "loss": 0.0117, "step": 10717 }, { "epoch": 19.775623268698062, "grad_norm": 0.4910516142845154, "learning_rate": 2.9761666666666667e-05, "loss": 0.0123, "step": 10718 }, { "epoch": 19.777469990766388, "grad_norm": 0.27473193407058716, "learning_rate": 2.9761333333333333e-05, "loss": 0.0144, "step": 10719 }, { "epoch": 19.779316712834717, "grad_norm": 0.20299680531024933, "learning_rate": 2.9761000000000002e-05, "loss": 0.012, "step": 10720 }, { "epoch": 19.781163434903046, "grad_norm": 0.16587220132350922, "learning_rate": 2.9760666666666668e-05, "loss": 0.0071, "step": 10721 }, { "epoch": 19.783010156971375, "grad_norm": 0.6967406868934631, "learning_rate": 2.9760333333333334e-05, "loss": 0.0166, "step": 10722 }, { "epoch": 19.784856879039705, "grad_norm": 0.2570898234844208, "learning_rate": 2.976e-05, "loss": 0.0105, "step": 10723 }, { "epoch": 19.786703601108034, "grad_norm": 0.3881112337112427, "learning_rate": 2.975966666666667e-05, "loss": 0.0115, "step": 10724 }, { "epoch": 19.788550323176363, "grad_norm": 0.4173627495765686, "learning_rate": 2.975933333333333e-05, "loss": 0.0127, "step": 10725 }, { "epoch": 19.790397045244692, "grad_norm": 0.4523608684539795, "learning_rate": 2.9759e-05, "loss": 0.0142, "step": 10726 }, { "epoch": 19.792243767313018, "grad_norm": 0.1756921112537384, "learning_rate": 
2.9758666666666667e-05, "loss": 0.0044, "step": 10727 }, { "epoch": 19.794090489381347, "grad_norm": 0.2844432592391968, "learning_rate": 2.9758333333333333e-05, "loss": 0.0079, "step": 10728 }, { "epoch": 19.795937211449676, "grad_norm": 0.2023969292640686, "learning_rate": 2.9758000000000002e-05, "loss": 0.0102, "step": 10729 }, { "epoch": 19.797783933518005, "grad_norm": 0.2869013249874115, "learning_rate": 2.9757666666666668e-05, "loss": 0.0088, "step": 10730 }, { "epoch": 19.799630655586334, "grad_norm": 0.2981497347354889, "learning_rate": 2.9757333333333334e-05, "loss": 0.0106, "step": 10731 }, { "epoch": 19.801477377654663, "grad_norm": 0.6983348727226257, "learning_rate": 2.9757e-05, "loss": 0.0142, "step": 10732 }, { "epoch": 19.803324099722992, "grad_norm": 0.28833532333374023, "learning_rate": 2.975666666666667e-05, "loss": 0.0117, "step": 10733 }, { "epoch": 19.80517082179132, "grad_norm": 0.5392321944236755, "learning_rate": 2.9756333333333332e-05, "loss": 0.0118, "step": 10734 }, { "epoch": 19.80701754385965, "grad_norm": 0.29473716020584106, "learning_rate": 2.9756e-05, "loss": 0.0104, "step": 10735 }, { "epoch": 19.808864265927976, "grad_norm": 0.24462805688381195, "learning_rate": 2.975566666666667e-05, "loss": 0.0084, "step": 10736 }, { "epoch": 19.810710987996305, "grad_norm": 0.20876191556453705, "learning_rate": 2.9755333333333333e-05, "loss": 0.0104, "step": 10737 }, { "epoch": 19.812557710064635, "grad_norm": 1.0548560619354248, "learning_rate": 2.9755000000000002e-05, "loss": 0.0134, "step": 10738 }, { "epoch": 19.814404432132964, "grad_norm": 0.18749235570430756, "learning_rate": 2.9754666666666668e-05, "loss": 0.0052, "step": 10739 }, { "epoch": 19.816251154201293, "grad_norm": 0.5859012603759766, "learning_rate": 2.9754333333333334e-05, "loss": 0.0073, "step": 10740 }, { "epoch": 19.818097876269622, "grad_norm": 0.3927551209926605, "learning_rate": 2.9754e-05, "loss": 0.0147, "step": 10741 }, { "epoch": 19.81994459833795, "grad_norm": 
0.2841923236846924, "learning_rate": 2.975366666666667e-05, "loss": 0.0089, "step": 10742 }, { "epoch": 19.82179132040628, "grad_norm": 0.26941174268722534, "learning_rate": 2.9753333333333332e-05, "loss": 0.0128, "step": 10743 }, { "epoch": 19.823638042474606, "grad_norm": 0.32388725876808167, "learning_rate": 2.9753e-05, "loss": 0.0132, "step": 10744 }, { "epoch": 19.825484764542935, "grad_norm": 0.9663119316101074, "learning_rate": 2.9752666666666667e-05, "loss": 0.0159, "step": 10745 }, { "epoch": 19.827331486611264, "grad_norm": 0.2247830033302307, "learning_rate": 2.9752333333333333e-05, "loss": 0.0092, "step": 10746 }, { "epoch": 19.829178208679593, "grad_norm": 0.169455423951149, "learning_rate": 2.9752000000000002e-05, "loss": 0.0068, "step": 10747 }, { "epoch": 19.831024930747922, "grad_norm": 0.6221871972084045, "learning_rate": 2.9751666666666665e-05, "loss": 0.0156, "step": 10748 }, { "epoch": 19.83287165281625, "grad_norm": 0.65969318151474, "learning_rate": 2.9751333333333334e-05, "loss": 0.186, "step": 10749 }, { "epoch": 19.83471837488458, "grad_norm": 0.587780237197876, "learning_rate": 2.9751e-05, "loss": 0.1271, "step": 10750 }, { "epoch": 19.83656509695291, "grad_norm": 0.6122603416442871, "learning_rate": 2.9750666666666666e-05, "loss": 0.1059, "step": 10751 }, { "epoch": 19.83841181902124, "grad_norm": 0.9975597262382507, "learning_rate": 2.9750333333333332e-05, "loss": 0.1889, "step": 10752 }, { "epoch": 19.840258541089565, "grad_norm": 0.4894188940525055, "learning_rate": 2.975e-05, "loss": 0.0773, "step": 10753 }, { "epoch": 19.842105263157894, "grad_norm": 0.5466663837432861, "learning_rate": 2.9749666666666667e-05, "loss": 0.0746, "step": 10754 }, { "epoch": 19.843951985226223, "grad_norm": 0.44391313195228577, "learning_rate": 2.9749333333333333e-05, "loss": 0.0762, "step": 10755 }, { "epoch": 19.845798707294552, "grad_norm": 0.7194757461547852, "learning_rate": 2.9749000000000003e-05, "loss": 0.0869, "step": 10756 }, { "epoch": 
19.84764542936288, "grad_norm": 0.42723938822746277, "learning_rate": 2.9748666666666665e-05, "loss": 0.0938, "step": 10757 }, { "epoch": 19.84949215143121, "grad_norm": 0.4974992871284485, "learning_rate": 2.9748333333333335e-05, "loss": 0.0541, "step": 10758 }, { "epoch": 19.85133887349954, "grad_norm": 0.27553194761276245, "learning_rate": 2.9748e-05, "loss": 0.0281, "step": 10759 }, { "epoch": 19.85318559556787, "grad_norm": 0.5271324515342712, "learning_rate": 2.9747666666666666e-05, "loss": 0.0481, "step": 10760 }, { "epoch": 19.855032317636194, "grad_norm": 0.704689085483551, "learning_rate": 2.9747333333333336e-05, "loss": 0.0303, "step": 10761 }, { "epoch": 19.856879039704523, "grad_norm": 0.41468116641044617, "learning_rate": 2.9747e-05, "loss": 0.0265, "step": 10762 }, { "epoch": 19.858725761772853, "grad_norm": 0.49711140990257263, "learning_rate": 2.9746666666666668e-05, "loss": 0.0488, "step": 10763 }, { "epoch": 19.86057248384118, "grad_norm": 0.3405868709087372, "learning_rate": 2.9746333333333334e-05, "loss": 0.0256, "step": 10764 }, { "epoch": 19.86241920590951, "grad_norm": 0.41757965087890625, "learning_rate": 2.9746000000000003e-05, "loss": 0.0204, "step": 10765 }, { "epoch": 19.86426592797784, "grad_norm": 0.36645424365997314, "learning_rate": 2.9745666666666665e-05, "loss": 0.0261, "step": 10766 }, { "epoch": 19.86611265004617, "grad_norm": 0.2549731433391571, "learning_rate": 2.9745333333333335e-05, "loss": 0.014, "step": 10767 }, { "epoch": 19.8679593721145, "grad_norm": 0.4444524645805359, "learning_rate": 2.9745e-05, "loss": 0.0123, "step": 10768 }, { "epoch": 19.869806094182824, "grad_norm": 0.49080604314804077, "learning_rate": 2.9744666666666667e-05, "loss": 0.0274, "step": 10769 }, { "epoch": 19.871652816251153, "grad_norm": 0.3654726445674896, "learning_rate": 2.9744333333333336e-05, "loss": 0.0119, "step": 10770 }, { "epoch": 19.873499538319482, "grad_norm": 0.3014700412750244, "learning_rate": 2.9744000000000002e-05, "loss": 
0.0097, "step": 10771 }, { "epoch": 19.87534626038781, "grad_norm": 0.2731376588344574, "learning_rate": 2.9743666666666668e-05, "loss": 0.0108, "step": 10772 }, { "epoch": 19.87719298245614, "grad_norm": 0.21904848515987396, "learning_rate": 2.9743333333333334e-05, "loss": 0.0114, "step": 10773 }, { "epoch": 19.87903970452447, "grad_norm": 0.41635486483573914, "learning_rate": 2.9743000000000003e-05, "loss": 0.0156, "step": 10774 }, { "epoch": 19.8808864265928, "grad_norm": 0.3734789490699768, "learning_rate": 2.9742666666666666e-05, "loss": 0.0113, "step": 10775 }, { "epoch": 19.882733148661128, "grad_norm": 0.44643911719322205, "learning_rate": 2.9742333333333335e-05, "loss": 0.0119, "step": 10776 }, { "epoch": 19.884579870729453, "grad_norm": 0.2010699212551117, "learning_rate": 2.9742e-05, "loss": 0.0094, "step": 10777 }, { "epoch": 19.886426592797783, "grad_norm": 0.3544909656047821, "learning_rate": 2.9741666666666667e-05, "loss": 0.011, "step": 10778 }, { "epoch": 19.88827331486611, "grad_norm": 0.3866400122642517, "learning_rate": 2.9741333333333336e-05, "loss": 0.0073, "step": 10779 }, { "epoch": 19.89012003693444, "grad_norm": 0.34151676297187805, "learning_rate": 2.9741e-05, "loss": 0.0169, "step": 10780 }, { "epoch": 19.89196675900277, "grad_norm": 0.29591402411460876, "learning_rate": 2.9740666666666668e-05, "loss": 0.008, "step": 10781 }, { "epoch": 19.8938134810711, "grad_norm": 0.30311036109924316, "learning_rate": 2.9740333333333334e-05, "loss": 0.0125, "step": 10782 }, { "epoch": 19.89566020313943, "grad_norm": 0.19861190021038055, "learning_rate": 2.974e-05, "loss": 0.0068, "step": 10783 }, { "epoch": 19.897506925207757, "grad_norm": 0.22376330196857452, "learning_rate": 2.9739666666666666e-05, "loss": 0.0081, "step": 10784 }, { "epoch": 19.899353647276087, "grad_norm": 0.4291958808898926, "learning_rate": 2.9739333333333335e-05, "loss": 0.0147, "step": 10785 }, { "epoch": 19.901200369344412, "grad_norm": 0.3459694981575012, "learning_rate": 
2.9739e-05, "loss": 0.0287, "step": 10786 }, { "epoch": 19.90304709141274, "grad_norm": 0.3165818154811859, "learning_rate": 2.9738666666666667e-05, "loss": 0.0101, "step": 10787 }, { "epoch": 19.90489381348107, "grad_norm": 0.16458719968795776, "learning_rate": 2.9738333333333336e-05, "loss": 0.0063, "step": 10788 }, { "epoch": 19.9067405355494, "grad_norm": 0.26842695474624634, "learning_rate": 2.9738e-05, "loss": 0.0077, "step": 10789 }, { "epoch": 19.90858725761773, "grad_norm": 0.6238064765930176, "learning_rate": 2.9737666666666668e-05, "loss": 0.0105, "step": 10790 }, { "epoch": 19.910433979686058, "grad_norm": 0.45984452962875366, "learning_rate": 2.9737333333333334e-05, "loss": 0.0136, "step": 10791 }, { "epoch": 19.912280701754387, "grad_norm": 0.22845137119293213, "learning_rate": 2.9737e-05, "loss": 0.0095, "step": 10792 }, { "epoch": 19.914127423822716, "grad_norm": 0.6064977645874023, "learning_rate": 2.9736666666666666e-05, "loss": 0.0135, "step": 10793 }, { "epoch": 19.91597414589104, "grad_norm": 0.6048700213432312, "learning_rate": 2.9736333333333335e-05, "loss": 0.0103, "step": 10794 }, { "epoch": 19.91782086795937, "grad_norm": 0.6151441931724548, "learning_rate": 2.9736e-05, "loss": 0.0128, "step": 10795 }, { "epoch": 19.9196675900277, "grad_norm": 0.722559928894043, "learning_rate": 2.9735666666666667e-05, "loss": 0.0122, "step": 10796 }, { "epoch": 19.92151431209603, "grad_norm": 0.9557310342788696, "learning_rate": 2.9735333333333337e-05, "loss": 0.016, "step": 10797 }, { "epoch": 19.92336103416436, "grad_norm": 0.5047065615653992, "learning_rate": 2.9735e-05, "loss": 0.0143, "step": 10798 }, { "epoch": 19.925207756232687, "grad_norm": 0.9226062297821045, "learning_rate": 2.973466666666667e-05, "loss": 0.1909, "step": 10799 }, { "epoch": 19.927054478301017, "grad_norm": 0.7328792214393616, "learning_rate": 2.9734333333333334e-05, "loss": 0.1344, "step": 10800 }, { "epoch": 19.928901200369346, "grad_norm": 0.9491218328475952, "learning_rate": 
2.9734e-05, "loss": 0.1537, "step": 10801 }, { "epoch": 19.930747922437675, "grad_norm": 0.7314680218696594, "learning_rate": 2.9733666666666666e-05, "loss": 0.1021, "step": 10802 }, { "epoch": 19.932594644506, "grad_norm": 0.3304039537906647, "learning_rate": 2.9733333333333336e-05, "loss": 0.07, "step": 10803 }, { "epoch": 19.93444136657433, "grad_norm": 0.7055652737617493, "learning_rate": 2.9733e-05, "loss": 0.1535, "step": 10804 }, { "epoch": 19.93628808864266, "grad_norm": 0.4285171627998352, "learning_rate": 2.9732666666666667e-05, "loss": 0.0605, "step": 10805 }, { "epoch": 19.938134810710988, "grad_norm": 0.5491939783096313, "learning_rate": 2.9732333333333333e-05, "loss": 0.0579, "step": 10806 }, { "epoch": 19.939981532779317, "grad_norm": 0.6970460414886475, "learning_rate": 2.9732e-05, "loss": 0.0504, "step": 10807 }, { "epoch": 19.941828254847646, "grad_norm": 0.36753565073013306, "learning_rate": 2.973166666666667e-05, "loss": 0.0547, "step": 10808 }, { "epoch": 19.943674976915975, "grad_norm": 0.4938269555568695, "learning_rate": 2.973133333333333e-05, "loss": 0.0613, "step": 10809 }, { "epoch": 19.945521698984304, "grad_norm": 0.2081955224275589, "learning_rate": 2.9731e-05, "loss": 0.0236, "step": 10810 }, { "epoch": 19.94736842105263, "grad_norm": 0.3069004714488983, "learning_rate": 2.973066666666667e-05, "loss": 0.0327, "step": 10811 }, { "epoch": 19.94921514312096, "grad_norm": 0.5289403796195984, "learning_rate": 2.9730333333333332e-05, "loss": 0.0276, "step": 10812 }, { "epoch": 19.95106186518929, "grad_norm": 0.26222914457321167, "learning_rate": 2.973e-05, "loss": 0.0095, "step": 10813 }, { "epoch": 19.952908587257618, "grad_norm": 0.4635355770587921, "learning_rate": 2.9729666666666668e-05, "loss": 0.029, "step": 10814 }, { "epoch": 19.954755309325947, "grad_norm": 0.43034881353378296, "learning_rate": 2.9729333333333334e-05, "loss": 0.0311, "step": 10815 }, { "epoch": 19.956602031394276, "grad_norm": 0.2856021821498871, "learning_rate": 
2.9729e-05, "loss": 0.0167, "step": 10816 }, { "epoch": 19.958448753462605, "grad_norm": 0.21279774606227875, "learning_rate": 2.972866666666667e-05, "loss": 0.0122, "step": 10817 }, { "epoch": 19.960295475530934, "grad_norm": 0.18603555858135223, "learning_rate": 2.972833333333333e-05, "loss": 0.0056, "step": 10818 }, { "epoch": 19.96214219759926, "grad_norm": 0.25018876791000366, "learning_rate": 2.9728e-05, "loss": 0.0151, "step": 10819 }, { "epoch": 19.96398891966759, "grad_norm": 0.23212258517742157, "learning_rate": 2.972766666666667e-05, "loss": 0.0104, "step": 10820 }, { "epoch": 19.965835641735918, "grad_norm": 0.5990542769432068, "learning_rate": 2.9727333333333333e-05, "loss": 0.0096, "step": 10821 }, { "epoch": 19.967682363804247, "grad_norm": 0.29488661885261536, "learning_rate": 2.9727000000000002e-05, "loss": 0.0179, "step": 10822 }, { "epoch": 19.969529085872576, "grad_norm": 0.2727646827697754, "learning_rate": 2.9726666666666668e-05, "loss": 0.0326, "step": 10823 }, { "epoch": 19.971375807940905, "grad_norm": 0.21818789839744568, "learning_rate": 2.9726333333333334e-05, "loss": 0.0071, "step": 10824 }, { "epoch": 19.973222530009235, "grad_norm": 0.3400208055973053, "learning_rate": 2.9726e-05, "loss": 0.0087, "step": 10825 }, { "epoch": 19.975069252077564, "grad_norm": 0.4294982850551605, "learning_rate": 2.972566666666667e-05, "loss": 0.0164, "step": 10826 }, { "epoch": 19.97691597414589, "grad_norm": 0.20422349870204926, "learning_rate": 2.9725333333333335e-05, "loss": 0.0102, "step": 10827 }, { "epoch": 19.97876269621422, "grad_norm": 0.3575103282928467, "learning_rate": 2.9725e-05, "loss": 0.0097, "step": 10828 }, { "epoch": 19.980609418282548, "grad_norm": 0.43506139516830444, "learning_rate": 2.972466666666667e-05, "loss": 0.0115, "step": 10829 }, { "epoch": 19.982456140350877, "grad_norm": 0.39745500683784485, "learning_rate": 2.9724333333333333e-05, "loss": 0.0112, "step": 10830 }, { "epoch": 19.984302862419206, "grad_norm": 
0.32903939485549927, "learning_rate": 2.9724000000000002e-05, "loss": 0.0168, "step": 10831 }, { "epoch": 19.986149584487535, "grad_norm": 0.5643303990364075, "learning_rate": 2.9723666666666668e-05, "loss": 0.0216, "step": 10832 }, { "epoch": 19.987996306555864, "grad_norm": 0.27045926451683044, "learning_rate": 2.9723333333333334e-05, "loss": 0.0096, "step": 10833 }, { "epoch": 19.989843028624193, "grad_norm": 0.26068615913391113, "learning_rate": 2.9723e-05, "loss": 0.0103, "step": 10834 }, { "epoch": 19.991689750692522, "grad_norm": 0.31268391013145447, "learning_rate": 2.9722666666666666e-05, "loss": 0.0086, "step": 10835 }, { "epoch": 19.993536472760848, "grad_norm": 0.26672235131263733, "learning_rate": 2.9722333333333335e-05, "loss": 0.0077, "step": 10836 }, { "epoch": 19.995383194829177, "grad_norm": 0.36160510778427124, "learning_rate": 2.9722e-05, "loss": 0.0095, "step": 10837 }, { "epoch": 19.997229916897506, "grad_norm": 0.7074096202850342, "learning_rate": 2.9721666666666667e-05, "loss": 0.0127, "step": 10838 }, { "epoch": 19.999076638965835, "grad_norm": 0.31743553280830383, "learning_rate": 2.9721333333333333e-05, "loss": 0.0114, "step": 10839 }, { "epoch": 20.0, "grad_norm": 0.265717089176178, "learning_rate": 2.9721000000000002e-05, "loss": 0.0028, "step": 10840 }, { "epoch": 20.00184672206833, "grad_norm": 0.49848538637161255, "learning_rate": 2.9720666666666665e-05, "loss": 0.152, "step": 10841 }, { "epoch": 20.00369344413666, "grad_norm": 0.6284631490707397, "learning_rate": 2.9720333333333334e-05, "loss": 0.166, "step": 10842 }, { "epoch": 20.005540166204987, "grad_norm": 0.4607674181461334, "learning_rate": 2.972e-05, "loss": 0.1029, "step": 10843 }, { "epoch": 20.007386888273317, "grad_norm": 0.5444645285606384, "learning_rate": 2.9719666666666666e-05, "loss": 0.0861, "step": 10844 }, { "epoch": 20.009233610341642, "grad_norm": 0.4144648313522339, "learning_rate": 2.9719333333333335e-05, "loss": 0.0896, "step": 10845 }, { "epoch": 
20.01108033240997, "grad_norm": 0.5311521887779236, "learning_rate": 2.9719e-05, "loss": 0.0902, "step": 10846 }, { "epoch": 20.0129270544783, "grad_norm": 0.3980214297771454, "learning_rate": 2.9718666666666667e-05, "loss": 0.0537, "step": 10847 }, { "epoch": 20.01477377654663, "grad_norm": 0.6435825824737549, "learning_rate": 2.9718333333333333e-05, "loss": 0.1102, "step": 10848 }, { "epoch": 20.01662049861496, "grad_norm": 0.41154757142066956, "learning_rate": 2.9718000000000002e-05, "loss": 0.0489, "step": 10849 }, { "epoch": 20.018467220683288, "grad_norm": 0.4965069890022278, "learning_rate": 2.9717666666666665e-05, "loss": 0.0433, "step": 10850 }, { "epoch": 20.020313942751617, "grad_norm": 0.3039763271808624, "learning_rate": 2.9717333333333334e-05, "loss": 0.0643, "step": 10851 }, { "epoch": 20.022160664819946, "grad_norm": 0.30471768975257874, "learning_rate": 2.9717e-05, "loss": 0.0276, "step": 10852 }, { "epoch": 20.02400738688827, "grad_norm": 0.3537667393684387, "learning_rate": 2.9716666666666666e-05, "loss": 0.0572, "step": 10853 }, { "epoch": 20.0258541089566, "grad_norm": 0.3218044638633728, "learning_rate": 2.9716333333333336e-05, "loss": 0.032, "step": 10854 }, { "epoch": 20.02770083102493, "grad_norm": 0.21395094692707062, "learning_rate": 2.9716e-05, "loss": 0.0155, "step": 10855 }, { "epoch": 20.02954755309326, "grad_norm": 0.1753055602312088, "learning_rate": 2.9715666666666667e-05, "loss": 0.0098, "step": 10856 }, { "epoch": 20.03139427516159, "grad_norm": 0.1765054166316986, "learning_rate": 2.9715333333333333e-05, "loss": 0.0081, "step": 10857 }, { "epoch": 20.033240997229917, "grad_norm": 0.15867426991462708, "learning_rate": 2.9715000000000003e-05, "loss": 0.0093, "step": 10858 }, { "epoch": 20.035087719298247, "grad_norm": 0.2986152172088623, "learning_rate": 2.9714666666666665e-05, "loss": 0.0131, "step": 10859 }, { "epoch": 20.036934441366576, "grad_norm": 0.5510702729225159, "learning_rate": 2.9714333333333335e-05, "loss": 0.0383, 
"step": 10860 }, { "epoch": 20.0387811634349, "grad_norm": 0.21956251561641693, "learning_rate": 2.9714000000000004e-05, "loss": 0.01, "step": 10861 }, { "epoch": 20.04062788550323, "grad_norm": 0.33517369627952576, "learning_rate": 2.9713666666666666e-05, "loss": 0.0155, "step": 10862 }, { "epoch": 20.04247460757156, "grad_norm": 0.5131813287734985, "learning_rate": 2.9713333333333336e-05, "loss": 0.0175, "step": 10863 }, { "epoch": 20.04432132963989, "grad_norm": 0.18757785856723785, "learning_rate": 2.9713e-05, "loss": 0.0084, "step": 10864 }, { "epoch": 20.046168051708218, "grad_norm": 0.14929111301898956, "learning_rate": 2.9712666666666668e-05, "loss": 0.0058, "step": 10865 }, { "epoch": 20.048014773776547, "grad_norm": 0.3974211513996124, "learning_rate": 2.9712333333333334e-05, "loss": 0.0118, "step": 10866 }, { "epoch": 20.049861495844876, "grad_norm": 0.3090802729129791, "learning_rate": 2.9712e-05, "loss": 0.0074, "step": 10867 }, { "epoch": 20.051708217913205, "grad_norm": 0.249526709318161, "learning_rate": 2.9711666666666665e-05, "loss": 0.0131, "step": 10868 }, { "epoch": 20.053554939981534, "grad_norm": 0.2154981642961502, "learning_rate": 2.9711333333333335e-05, "loss": 0.0055, "step": 10869 }, { "epoch": 20.05540166204986, "grad_norm": 0.18604527413845062, "learning_rate": 2.9711e-05, "loss": 0.0062, "step": 10870 }, { "epoch": 20.05724838411819, "grad_norm": 0.26657775044441223, "learning_rate": 2.9710666666666667e-05, "loss": 0.0076, "step": 10871 }, { "epoch": 20.05909510618652, "grad_norm": 0.16824635863304138, "learning_rate": 2.9710333333333336e-05, "loss": 0.0036, "step": 10872 }, { "epoch": 20.060941828254848, "grad_norm": 0.22853249311447144, "learning_rate": 2.971e-05, "loss": 0.0062, "step": 10873 }, { "epoch": 20.062788550323177, "grad_norm": 0.1347638964653015, "learning_rate": 2.9709666666666668e-05, "loss": 0.0043, "step": 10874 }, { "epoch": 20.064635272391506, "grad_norm": 0.5696449279785156, "learning_rate": 
2.9709333333333334e-05, "loss": 0.0202, "step": 10875 }, { "epoch": 20.066481994459835, "grad_norm": 0.1813577264547348, "learning_rate": 2.9709e-05, "loss": 0.006, "step": 10876 }, { "epoch": 20.068328716528164, "grad_norm": 0.24455659091472626, "learning_rate": 2.970866666666667e-05, "loss": 0.036, "step": 10877 }, { "epoch": 20.07017543859649, "grad_norm": 0.26324525475502014, "learning_rate": 2.9708333333333335e-05, "loss": 0.0101, "step": 10878 }, { "epoch": 20.07202216066482, "grad_norm": 0.3263266682624817, "learning_rate": 2.9708e-05, "loss": 0.0065, "step": 10879 }, { "epoch": 20.073868882733148, "grad_norm": 0.43081149458885193, "learning_rate": 2.9707666666666667e-05, "loss": 0.0163, "step": 10880 }, { "epoch": 20.075715604801477, "grad_norm": 0.2542450726032257, "learning_rate": 2.9707333333333336e-05, "loss": 0.007, "step": 10881 }, { "epoch": 20.077562326869806, "grad_norm": 0.5075932741165161, "learning_rate": 2.9707e-05, "loss": 0.0137, "step": 10882 }, { "epoch": 20.079409048938135, "grad_norm": 0.47679972648620605, "learning_rate": 2.9706666666666668e-05, "loss": 0.0112, "step": 10883 }, { "epoch": 20.081255771006465, "grad_norm": 0.2431761473417282, "learning_rate": 2.9706333333333334e-05, "loss": 0.0082, "step": 10884 }, { "epoch": 20.083102493074794, "grad_norm": 0.3411027789115906, "learning_rate": 2.9706e-05, "loss": 0.0096, "step": 10885 }, { "epoch": 20.08494921514312, "grad_norm": 0.4069492816925049, "learning_rate": 2.970566666666667e-05, "loss": 0.0056, "step": 10886 }, { "epoch": 20.08679593721145, "grad_norm": 0.283371239900589, "learning_rate": 2.9705333333333335e-05, "loss": 0.0101, "step": 10887 }, { "epoch": 20.088642659279778, "grad_norm": 0.1481645703315735, "learning_rate": 2.9705e-05, "loss": 0.0033, "step": 10888 }, { "epoch": 20.090489381348107, "grad_norm": 0.45300835371017456, "learning_rate": 2.9704666666666667e-05, "loss": 0.0091, "step": 10889 }, { "epoch": 20.092336103416436, "grad_norm": 0.6289262175559998, 
"learning_rate": 2.9704333333333336e-05, "loss": 0.0061, "step": 10890 }, { "epoch": 20.094182825484765, "grad_norm": 0.5154815316200256, "learning_rate": 2.9704e-05, "loss": 0.1376, "step": 10891 }, { "epoch": 20.096029547553094, "grad_norm": 0.49002134799957275, "learning_rate": 2.9703666666666668e-05, "loss": 0.1471, "step": 10892 }, { "epoch": 20.097876269621423, "grad_norm": 0.4354952871799469, "learning_rate": 2.9703333333333334e-05, "loss": 0.1032, "step": 10893 }, { "epoch": 20.099722991689752, "grad_norm": 0.46918243169784546, "learning_rate": 2.9703e-05, "loss": 0.0712, "step": 10894 }, { "epoch": 20.101569713758078, "grad_norm": 0.350827693939209, "learning_rate": 2.970266666666667e-05, "loss": 0.0663, "step": 10895 }, { "epoch": 20.103416435826407, "grad_norm": 0.4532705247402191, "learning_rate": 2.9702333333333332e-05, "loss": 0.0965, "step": 10896 }, { "epoch": 20.105263157894736, "grad_norm": 0.4961085915565491, "learning_rate": 2.9702e-05, "loss": 0.0687, "step": 10897 }, { "epoch": 20.107109879963065, "grad_norm": 0.41538205742836, "learning_rate": 2.9701666666666667e-05, "loss": 0.0523, "step": 10898 }, { "epoch": 20.108956602031395, "grad_norm": 0.8226404190063477, "learning_rate": 2.9701333333333333e-05, "loss": 0.0539, "step": 10899 }, { "epoch": 20.110803324099724, "grad_norm": 0.3654651343822479, "learning_rate": 2.9701e-05, "loss": 0.0602, "step": 10900 }, { "epoch": 20.112650046168053, "grad_norm": 0.3377811908721924, "learning_rate": 2.970066666666667e-05, "loss": 0.0319, "step": 10901 }, { "epoch": 20.114496768236382, "grad_norm": 0.34735211730003357, "learning_rate": 2.9700333333333334e-05, "loss": 0.0557, "step": 10902 }, { "epoch": 20.116343490304708, "grad_norm": 0.3268611431121826, "learning_rate": 2.97e-05, "loss": 0.0273, "step": 10903 }, { "epoch": 20.118190212373037, "grad_norm": 0.3331509530544281, "learning_rate": 2.969966666666667e-05, "loss": 0.0452, "step": 10904 }, { "epoch": 20.120036934441366, "grad_norm": 
0.2206970751285553, "learning_rate": 2.9699333333333332e-05, "loss": 0.0233, "step": 10905 }, { "epoch": 20.121883656509695, "grad_norm": 0.30379095673561096, "learning_rate": 2.9699e-05, "loss": 0.0092, "step": 10906 }, { "epoch": 20.123730378578024, "grad_norm": 0.6868551969528198, "learning_rate": 2.9698666666666667e-05, "loss": 0.0436, "step": 10907 }, { "epoch": 20.125577100646353, "grad_norm": 0.38484877347946167, "learning_rate": 2.9698333333333333e-05, "loss": 0.0208, "step": 10908 }, { "epoch": 20.127423822714682, "grad_norm": 0.2958061099052429, "learning_rate": 2.9698e-05, "loss": 0.016, "step": 10909 }, { "epoch": 20.12927054478301, "grad_norm": 0.16953197121620178, "learning_rate": 2.969766666666667e-05, "loss": 0.0077, "step": 10910 }, { "epoch": 20.131117266851337, "grad_norm": 0.2099265456199646, "learning_rate": 2.9697333333333335e-05, "loss": 0.0121, "step": 10911 }, { "epoch": 20.132963988919666, "grad_norm": 0.2694770097732544, "learning_rate": 2.9697e-05, "loss": 0.0101, "step": 10912 }, { "epoch": 20.134810710987995, "grad_norm": 0.4015445411205292, "learning_rate": 2.969666666666667e-05, "loss": 0.0099, "step": 10913 }, { "epoch": 20.136657433056325, "grad_norm": 0.5667865872383118, "learning_rate": 2.9696333333333332e-05, "loss": 0.0103, "step": 10914 }, { "epoch": 20.138504155124654, "grad_norm": 0.3129112422466278, "learning_rate": 2.9696e-05, "loss": 0.007, "step": 10915 }, { "epoch": 20.140350877192983, "grad_norm": 0.3248129189014435, "learning_rate": 2.9695666666666668e-05, "loss": 0.0132, "step": 10916 }, { "epoch": 20.142197599261312, "grad_norm": 0.24504362046718597, "learning_rate": 2.9695333333333334e-05, "loss": 0.0073, "step": 10917 }, { "epoch": 20.14404432132964, "grad_norm": 0.3159309923648834, "learning_rate": 2.9695e-05, "loss": 0.0079, "step": 10918 }, { "epoch": 20.14589104339797, "grad_norm": 0.4856783151626587, "learning_rate": 2.969466666666667e-05, "loss": 0.0138, "step": 10919 }, { "epoch": 20.147737765466296, 
"grad_norm": 0.3120325803756714, "learning_rate": 2.9694333333333335e-05, "loss": 0.0057, "step": 10920 }, { "epoch": 20.149584487534625, "grad_norm": 0.25674888491630554, "learning_rate": 2.9694e-05, "loss": 0.0064, "step": 10921 }, { "epoch": 20.151431209602954, "grad_norm": 0.17604240775108337, "learning_rate": 2.969366666666667e-05, "loss": 0.004, "step": 10922 }, { "epoch": 20.153277931671283, "grad_norm": 0.3554757237434387, "learning_rate": 2.9693333333333333e-05, "loss": 0.0107, "step": 10923 }, { "epoch": 20.155124653739612, "grad_norm": 0.31348365545272827, "learning_rate": 2.9693000000000002e-05, "loss": 0.012, "step": 10924 }, { "epoch": 20.15697137580794, "grad_norm": 0.39087775349617004, "learning_rate": 2.9692666666666668e-05, "loss": 0.0082, "step": 10925 }, { "epoch": 20.15881809787627, "grad_norm": 0.24652767181396484, "learning_rate": 2.9692333333333334e-05, "loss": 0.0078, "step": 10926 }, { "epoch": 20.1606648199446, "grad_norm": 0.3166562616825104, "learning_rate": 2.9692000000000003e-05, "loss": 0.0133, "step": 10927 }, { "epoch": 20.162511542012926, "grad_norm": 0.2203536182641983, "learning_rate": 2.9691666666666666e-05, "loss": 0.0087, "step": 10928 }, { "epoch": 20.164358264081255, "grad_norm": 0.2523220181465149, "learning_rate": 2.9691333333333335e-05, "loss": 0.0117, "step": 10929 }, { "epoch": 20.166204986149584, "grad_norm": 0.6120240092277527, "learning_rate": 2.9691e-05, "loss": 0.0143, "step": 10930 }, { "epoch": 20.168051708217913, "grad_norm": 0.34441477060317993, "learning_rate": 2.9690666666666667e-05, "loss": 0.0097, "step": 10931 }, { "epoch": 20.169898430286242, "grad_norm": 0.35958561301231384, "learning_rate": 2.9690333333333333e-05, "loss": 0.0057, "step": 10932 }, { "epoch": 20.17174515235457, "grad_norm": 0.20114681124687195, "learning_rate": 2.9690000000000002e-05, "loss": 0.0064, "step": 10933 }, { "epoch": 20.1735918744229, "grad_norm": 0.40742751955986023, "learning_rate": 2.9689666666666665e-05, "loss": 0.0136, 
"step": 10934 }, { "epoch": 20.17543859649123, "grad_norm": 0.21357586979866028, "learning_rate": 2.9689333333333334e-05, "loss": 0.0075, "step": 10935 }, { "epoch": 20.177285318559555, "grad_norm": 0.2256045937538147, "learning_rate": 2.9689000000000003e-05, "loss": 0.0073, "step": 10936 }, { "epoch": 20.179132040627884, "grad_norm": 1.3588895797729492, "learning_rate": 2.9688666666666666e-05, "loss": 0.0132, "step": 10937 }, { "epoch": 20.180978762696213, "grad_norm": 0.20366744697093964, "learning_rate": 2.9688333333333335e-05, "loss": 0.0066, "step": 10938 }, { "epoch": 20.182825484764543, "grad_norm": 0.20821727812290192, "learning_rate": 2.9688e-05, "loss": 0.0058, "step": 10939 }, { "epoch": 20.18467220683287, "grad_norm": 0.15946674346923828, "learning_rate": 2.9687666666666667e-05, "loss": 0.0052, "step": 10940 }, { "epoch": 20.1865189289012, "grad_norm": 0.46555081009864807, "learning_rate": 2.9687333333333333e-05, "loss": 0.1196, "step": 10941 }, { "epoch": 20.18836565096953, "grad_norm": 0.5835827589035034, "learning_rate": 2.9687000000000002e-05, "loss": 0.1611, "step": 10942 }, { "epoch": 20.19021237303786, "grad_norm": 0.5876694321632385, "learning_rate": 2.9686666666666665e-05, "loss": 0.1476, "step": 10943 }, { "epoch": 20.19205909510619, "grad_norm": 0.37970054149627686, "learning_rate": 2.9686333333333334e-05, "loss": 0.0706, "step": 10944 }, { "epoch": 20.193905817174514, "grad_norm": 0.5568404793739319, "learning_rate": 2.9686000000000003e-05, "loss": 0.0907, "step": 10945 }, { "epoch": 20.195752539242843, "grad_norm": 0.3703577220439911, "learning_rate": 2.9685666666666666e-05, "loss": 0.0511, "step": 10946 }, { "epoch": 20.197599261311172, "grad_norm": 0.4699932634830475, "learning_rate": 2.9685333333333335e-05, "loss": 0.0769, "step": 10947 }, { "epoch": 20.1994459833795, "grad_norm": 0.50783371925354, "learning_rate": 2.9685e-05, "loss": 0.0718, "step": 10948 }, { "epoch": 20.20129270544783, "grad_norm": 0.47592678666114807, 
"learning_rate": 2.9684666666666667e-05, "loss": 0.0514, "step": 10949 }, { "epoch": 20.20313942751616, "grad_norm": 0.4134548306465149, "learning_rate": 2.9684333333333333e-05, "loss": 0.0467, "step": 10950 }, { "epoch": 20.20498614958449, "grad_norm": 0.4005981981754303, "learning_rate": 2.9684000000000002e-05, "loss": 0.0494, "step": 10951 }, { "epoch": 20.206832871652818, "grad_norm": 0.35495683550834656, "learning_rate": 2.968366666666667e-05, "loss": 0.0497, "step": 10952 }, { "epoch": 20.208679593721143, "grad_norm": 0.3859303593635559, "learning_rate": 2.9683333333333334e-05, "loss": 0.0386, "step": 10953 }, { "epoch": 20.210526315789473, "grad_norm": 0.4294539988040924, "learning_rate": 2.9683000000000004e-05, "loss": 0.0125, "step": 10954 }, { "epoch": 20.2123730378578, "grad_norm": 0.23544616997241974, "learning_rate": 2.9682666666666666e-05, "loss": 0.0209, "step": 10955 }, { "epoch": 20.21421975992613, "grad_norm": 0.2471626102924347, "learning_rate": 2.9682333333333335e-05, "loss": 0.0429, "step": 10956 }, { "epoch": 20.21606648199446, "grad_norm": 0.2147773653268814, "learning_rate": 2.9681999999999998e-05, "loss": 0.0265, "step": 10957 }, { "epoch": 20.21791320406279, "grad_norm": 0.20125578343868256, "learning_rate": 2.9681666666666667e-05, "loss": 0.0147, "step": 10958 }, { "epoch": 20.21975992613112, "grad_norm": 0.37904635071754456, "learning_rate": 2.9681333333333333e-05, "loss": 0.0075, "step": 10959 }, { "epoch": 20.221606648199447, "grad_norm": 0.4701364040374756, "learning_rate": 2.9681e-05, "loss": 0.0311, "step": 10960 }, { "epoch": 20.223453370267773, "grad_norm": 0.4250022768974304, "learning_rate": 2.968066666666667e-05, "loss": 0.0734, "step": 10961 }, { "epoch": 20.225300092336102, "grad_norm": 0.22264167666435242, "learning_rate": 2.9680333333333334e-05, "loss": 0.0054, "step": 10962 }, { "epoch": 20.22714681440443, "grad_norm": 0.08203784376382828, "learning_rate": 2.968e-05, "loss": 0.0032, "step": 10963 }, { "epoch": 
20.22899353647276, "grad_norm": 0.2587185204029083, "learning_rate": 2.9679666666666666e-05, "loss": 0.0142, "step": 10964 }, { "epoch": 20.23084025854109, "grad_norm": 0.47471538186073303, "learning_rate": 2.9679333333333336e-05, "loss": 0.0133, "step": 10965 }, { "epoch": 20.23268698060942, "grad_norm": 0.612973690032959, "learning_rate": 2.9678999999999998e-05, "loss": 0.0109, "step": 10966 }, { "epoch": 20.234533702677748, "grad_norm": 0.4560556411743164, "learning_rate": 2.9678666666666668e-05, "loss": 0.0105, "step": 10967 }, { "epoch": 20.236380424746077, "grad_norm": 0.25839492678642273, "learning_rate": 2.9678333333333334e-05, "loss": 0.0099, "step": 10968 }, { "epoch": 20.238227146814406, "grad_norm": 0.16658927500247955, "learning_rate": 2.9678e-05, "loss": 0.0078, "step": 10969 }, { "epoch": 20.24007386888273, "grad_norm": 0.37365013360977173, "learning_rate": 2.967766666666667e-05, "loss": 0.0072, "step": 10970 }, { "epoch": 20.24192059095106, "grad_norm": 0.2552949786186218, "learning_rate": 2.9677333333333335e-05, "loss": 0.0802, "step": 10971 }, { "epoch": 20.24376731301939, "grad_norm": 0.37917381525039673, "learning_rate": 2.9677e-05, "loss": 0.0102, "step": 10972 }, { "epoch": 20.24561403508772, "grad_norm": 0.2963426411151886, "learning_rate": 2.9676666666666667e-05, "loss": 0.0123, "step": 10973 }, { "epoch": 20.24746075715605, "grad_norm": 0.20354008674621582, "learning_rate": 2.9676333333333336e-05, "loss": 0.0096, "step": 10974 }, { "epoch": 20.249307479224377, "grad_norm": 0.18017128109931946, "learning_rate": 2.9676e-05, "loss": 0.0049, "step": 10975 }, { "epoch": 20.251154201292707, "grad_norm": 0.5300956964492798, "learning_rate": 2.9675666666666668e-05, "loss": 0.0078, "step": 10976 }, { "epoch": 20.253000923361036, "grad_norm": 0.31972944736480713, "learning_rate": 2.9675333333333337e-05, "loss": 0.0069, "step": 10977 }, { "epoch": 20.25484764542936, "grad_norm": 0.28348520398139954, "learning_rate": 2.9675e-05, "loss": 0.0096, "step": 
10978 }, { "epoch": 20.25669436749769, "grad_norm": 0.4289419949054718, "learning_rate": 2.967466666666667e-05, "loss": 0.0149, "step": 10979 }, { "epoch": 20.25854108956602, "grad_norm": 0.3754739463329315, "learning_rate": 2.9674333333333335e-05, "loss": 0.0074, "step": 10980 }, { "epoch": 20.26038781163435, "grad_norm": 0.2263016402721405, "learning_rate": 2.9674e-05, "loss": 0.0058, "step": 10981 }, { "epoch": 20.262234533702678, "grad_norm": 0.2127392590045929, "learning_rate": 2.9673666666666667e-05, "loss": 0.0095, "step": 10982 }, { "epoch": 20.264081255771007, "grad_norm": 0.2642723321914673, "learning_rate": 2.9673333333333336e-05, "loss": 0.0076, "step": 10983 }, { "epoch": 20.265927977839336, "grad_norm": 0.7602730393409729, "learning_rate": 2.9673e-05, "loss": 0.0221, "step": 10984 }, { "epoch": 20.267774699907665, "grad_norm": 0.36623716354370117, "learning_rate": 2.9672666666666668e-05, "loss": 0.0107, "step": 10985 }, { "epoch": 20.26962142197599, "grad_norm": 0.3910073935985565, "learning_rate": 2.9672333333333334e-05, "loss": 0.0084, "step": 10986 }, { "epoch": 20.27146814404432, "grad_norm": 0.412586510181427, "learning_rate": 2.9672e-05, "loss": 0.0073, "step": 10987 }, { "epoch": 20.27331486611265, "grad_norm": 0.3259273171424866, "learning_rate": 2.967166666666667e-05, "loss": 0.0054, "step": 10988 }, { "epoch": 20.27516158818098, "grad_norm": 0.15527020394802094, "learning_rate": 2.9671333333333332e-05, "loss": 0.0077, "step": 10989 }, { "epoch": 20.277008310249307, "grad_norm": 0.34989339113235474, "learning_rate": 2.9671e-05, "loss": 0.0196, "step": 10990 }, { "epoch": 20.278855032317637, "grad_norm": 0.5065251588821411, "learning_rate": 2.9670666666666667e-05, "loss": 0.1234, "step": 10991 }, { "epoch": 20.280701754385966, "grad_norm": 0.5200304388999939, "learning_rate": 2.9670333333333333e-05, "loss": 0.201, "step": 10992 }, { "epoch": 20.282548476454295, "grad_norm": 0.5644885897636414, "learning_rate": 2.967e-05, "loss": 0.1444, 
"step": 10993 }, { "epoch": 20.284395198522624, "grad_norm": 0.5528456568717957, "learning_rate": 2.9669666666666668e-05, "loss": 0.1003, "step": 10994 }, { "epoch": 20.28624192059095, "grad_norm": 0.5606066584587097, "learning_rate": 2.9669333333333334e-05, "loss": 0.0624, "step": 10995 }, { "epoch": 20.28808864265928, "grad_norm": 0.5903345346450806, "learning_rate": 2.9669e-05, "loss": 0.0414, "step": 10996 }, { "epoch": 20.289935364727608, "grad_norm": 0.471769779920578, "learning_rate": 2.966866666666667e-05, "loss": 0.0666, "step": 10997 }, { "epoch": 20.291782086795937, "grad_norm": 0.5256743431091309, "learning_rate": 2.9668333333333332e-05, "loss": 0.0724, "step": 10998 }, { "epoch": 20.293628808864266, "grad_norm": 0.41306188702583313, "learning_rate": 2.9668e-05, "loss": 0.0388, "step": 10999 }, { "epoch": 20.295475530932595, "grad_norm": 0.46824705600738525, "learning_rate": 2.9667666666666667e-05, "loss": 0.0516, "step": 11000 }, { "epoch": 20.295475530932595, "eval_cer": 0.1074992871400057, "eval_loss": 0.30857551097869873, "eval_runtime": 16.2156, "eval_samples_per_second": 18.747, "eval_steps_per_second": 0.617, "eval_wer": 0.37010744435917114, "step": 11000 }, { "epoch": 20.297322253000925, "grad_norm": 0.4235368072986603, "learning_rate": 2.9667333333333333e-05, "loss": 0.0344, "step": 11001 }, { "epoch": 20.299168975069254, "grad_norm": 0.6765577793121338, "learning_rate": 2.9667000000000002e-05, "loss": 0.0277, "step": 11002 }, { "epoch": 20.30101569713758, "grad_norm": 0.276685893535614, "learning_rate": 2.966666666666667e-05, "loss": 0.0208, "step": 11003 }, { "epoch": 20.30286241920591, "grad_norm": 0.2812119722366333, "learning_rate": 2.9666333333333334e-05, "loss": 0.024, "step": 11004 }, { "epoch": 20.304709141274238, "grad_norm": 0.33644866943359375, "learning_rate": 2.9666e-05, "loss": 0.028, "step": 11005 }, { "epoch": 20.306555863342567, "grad_norm": 0.198531374335289, "learning_rate": 2.966566666666667e-05, "loss": 0.0174, "step": 
11006 }, { "epoch": 20.308402585410896, "grad_norm": 0.19303059577941895, "learning_rate": 2.9665333333333332e-05, "loss": 0.0096, "step": 11007 }, { "epoch": 20.310249307479225, "grad_norm": 0.26009589433670044, "learning_rate": 2.9665e-05, "loss": 0.0146, "step": 11008 }, { "epoch": 20.312096029547554, "grad_norm": 0.34054481983184814, "learning_rate": 2.9664666666666667e-05, "loss": 0.0158, "step": 11009 }, { "epoch": 20.313942751615883, "grad_norm": 0.1274450719356537, "learning_rate": 2.9664333333333333e-05, "loss": 0.0059, "step": 11010 }, { "epoch": 20.31578947368421, "grad_norm": 0.41350147128105164, "learning_rate": 2.9664000000000003e-05, "loss": 0.0063, "step": 11011 }, { "epoch": 20.317636195752538, "grad_norm": 0.39728933572769165, "learning_rate": 2.966366666666667e-05, "loss": 0.0108, "step": 11012 }, { "epoch": 20.319482917820867, "grad_norm": 0.3444744646549225, "learning_rate": 2.9663333333333334e-05, "loss": 0.0109, "step": 11013 }, { "epoch": 20.321329639889196, "grad_norm": 0.30115723609924316, "learning_rate": 2.9663e-05, "loss": 0.0098, "step": 11014 }, { "epoch": 20.323176361957525, "grad_norm": 0.12758363783359528, "learning_rate": 2.966266666666667e-05, "loss": 0.0034, "step": 11015 }, { "epoch": 20.325023084025855, "grad_norm": 0.32861289381980896, "learning_rate": 2.9662333333333332e-05, "loss": 0.01, "step": 11016 }, { "epoch": 20.326869806094184, "grad_norm": 0.4190202057361603, "learning_rate": 2.9662e-05, "loss": 0.0337, "step": 11017 }, { "epoch": 20.328716528162513, "grad_norm": 0.24281994998455048, "learning_rate": 2.9661666666666664e-05, "loss": 0.0061, "step": 11018 }, { "epoch": 20.330563250230842, "grad_norm": 0.2888421416282654, "learning_rate": 2.9661333333333333e-05, "loss": 0.0131, "step": 11019 }, { "epoch": 20.332409972299168, "grad_norm": 0.27400413155555725, "learning_rate": 2.9661000000000003e-05, "loss": 0.0076, "step": 11020 }, { "epoch": 20.334256694367497, "grad_norm": 0.35427457094192505, "learning_rate": 
2.9660666666666665e-05, "loss": 0.0121, "step": 11021 }, { "epoch": 20.336103416435826, "grad_norm": 0.27263957262039185, "learning_rate": 2.9660333333333335e-05, "loss": 0.0069, "step": 11022 }, { "epoch": 20.337950138504155, "grad_norm": 0.15267528593540192, "learning_rate": 2.966e-05, "loss": 0.0055, "step": 11023 }, { "epoch": 20.339796860572484, "grad_norm": 0.21431012451648712, "learning_rate": 2.9659666666666667e-05, "loss": 0.0055, "step": 11024 }, { "epoch": 20.341643582640813, "grad_norm": 0.5318107604980469, "learning_rate": 2.9659333333333332e-05, "loss": 0.0098, "step": 11025 }, { "epoch": 20.343490304709142, "grad_norm": 0.07205729931592941, "learning_rate": 2.9659000000000002e-05, "loss": 0.0018, "step": 11026 }, { "epoch": 20.34533702677747, "grad_norm": 0.22027286887168884, "learning_rate": 2.9658666666666668e-05, "loss": 0.0069, "step": 11027 }, { "epoch": 20.347183748845797, "grad_norm": 0.21012485027313232, "learning_rate": 2.9658333333333334e-05, "loss": 0.006, "step": 11028 }, { "epoch": 20.349030470914126, "grad_norm": 0.15638655424118042, "learning_rate": 2.9658000000000003e-05, "loss": 0.0065, "step": 11029 }, { "epoch": 20.350877192982455, "grad_norm": 0.5567883253097534, "learning_rate": 2.9657666666666666e-05, "loss": 0.0186, "step": 11030 }, { "epoch": 20.352723915050785, "grad_norm": 0.5367173552513123, "learning_rate": 2.9657333333333335e-05, "loss": 0.007, "step": 11031 }, { "epoch": 20.354570637119114, "grad_norm": 0.8661971688270569, "learning_rate": 2.9657e-05, "loss": 0.0171, "step": 11032 }, { "epoch": 20.356417359187443, "grad_norm": 0.4001052677631378, "learning_rate": 2.9656666666666667e-05, "loss": 0.0109, "step": 11033 }, { "epoch": 20.358264081255772, "grad_norm": 0.24180084466934204, "learning_rate": 2.9656333333333333e-05, "loss": 0.0051, "step": 11034 }, { "epoch": 20.3601108033241, "grad_norm": 0.5838049650192261, "learning_rate": 2.9656000000000002e-05, "loss": 0.0061, "step": 11035 }, { "epoch": 20.361957525392427, 
"grad_norm": 0.35159727931022644, "learning_rate": 2.9655666666666668e-05, "loss": 0.011, "step": 11036 }, { "epoch": 20.363804247460756, "grad_norm": 0.3918987214565277, "learning_rate": 2.9655333333333334e-05, "loss": 0.0112, "step": 11037 }, { "epoch": 20.365650969529085, "grad_norm": 0.1534157693386078, "learning_rate": 2.9655000000000003e-05, "loss": 0.0059, "step": 11038 }, { "epoch": 20.367497691597414, "grad_norm": 0.6456652879714966, "learning_rate": 2.9654666666666666e-05, "loss": 0.02, "step": 11039 }, { "epoch": 20.369344413665743, "grad_norm": 0.2508772909641266, "learning_rate": 2.9654333333333335e-05, "loss": 0.0045, "step": 11040 }, { "epoch": 20.371191135734072, "grad_norm": 0.5599384307861328, "learning_rate": 2.9654e-05, "loss": 0.1466, "step": 11041 }, { "epoch": 20.3730378578024, "grad_norm": 0.5619744062423706, "learning_rate": 2.9653666666666667e-05, "loss": 0.1297, "step": 11042 }, { "epoch": 20.37488457987073, "grad_norm": 0.5407826900482178, "learning_rate": 2.9653333333333333e-05, "loss": 0.1643, "step": 11043 }, { "epoch": 20.37673130193906, "grad_norm": 0.7417288422584534, "learning_rate": 2.9653000000000002e-05, "loss": 0.0849, "step": 11044 }, { "epoch": 20.378578024007385, "grad_norm": 0.357843279838562, "learning_rate": 2.9652666666666668e-05, "loss": 0.0829, "step": 11045 }, { "epoch": 20.380424746075715, "grad_norm": 0.5893060564994812, "learning_rate": 2.9652333333333334e-05, "loss": 0.1218, "step": 11046 }, { "epoch": 20.382271468144044, "grad_norm": 0.35972005128860474, "learning_rate": 2.9652e-05, "loss": 0.042, "step": 11047 }, { "epoch": 20.384118190212373, "grad_norm": 0.4211483895778656, "learning_rate": 2.9651666666666666e-05, "loss": 0.055, "step": 11048 }, { "epoch": 20.385964912280702, "grad_norm": 0.5754496455192566, "learning_rate": 2.9651333333333335e-05, "loss": 0.0691, "step": 11049 }, { "epoch": 20.38781163434903, "grad_norm": 0.38568854331970215, "learning_rate": 2.9650999999999998e-05, "loss": 0.0362, "step": 
11050 }, { "epoch": 20.38965835641736, "grad_norm": 0.2110164612531662, "learning_rate": 2.9650666666666667e-05, "loss": 0.0279, "step": 11051 }, { "epoch": 20.39150507848569, "grad_norm": 0.5566184520721436, "learning_rate": 2.9650333333333336e-05, "loss": 0.0469, "step": 11052 }, { "epoch": 20.393351800554015, "grad_norm": 0.38283446431159973, "learning_rate": 2.965e-05, "loss": 0.0326, "step": 11053 }, { "epoch": 20.395198522622344, "grad_norm": 0.2858939468860626, "learning_rate": 2.964966666666667e-05, "loss": 0.036, "step": 11054 }, { "epoch": 20.397045244690673, "grad_norm": 0.22442243993282318, "learning_rate": 2.9649333333333334e-05, "loss": 0.0156, "step": 11055 }, { "epoch": 20.398891966759003, "grad_norm": 0.2950870096683502, "learning_rate": 2.9649e-05, "loss": 0.0242, "step": 11056 }, { "epoch": 20.40073868882733, "grad_norm": 0.20051699876785278, "learning_rate": 2.9648666666666666e-05, "loss": 0.0054, "step": 11057 }, { "epoch": 20.40258541089566, "grad_norm": 0.15492866933345795, "learning_rate": 2.9648333333333335e-05, "loss": 0.0067, "step": 11058 }, { "epoch": 20.40443213296399, "grad_norm": 0.34917473793029785, "learning_rate": 2.9647999999999998e-05, "loss": 0.0182, "step": 11059 }, { "epoch": 20.40627885503232, "grad_norm": 0.18603120744228363, "learning_rate": 2.9647666666666667e-05, "loss": 0.008, "step": 11060 }, { "epoch": 20.408125577100645, "grad_norm": 0.3769189715385437, "learning_rate": 2.9647333333333337e-05, "loss": 0.0078, "step": 11061 }, { "epoch": 20.409972299168974, "grad_norm": 0.18756088614463806, "learning_rate": 2.9647e-05, "loss": 0.0203, "step": 11062 }, { "epoch": 20.411819021237303, "grad_norm": 0.25873249769210815, "learning_rate": 2.964666666666667e-05, "loss": 0.0101, "step": 11063 }, { "epoch": 20.413665743305632, "grad_norm": 0.2606721520423889, "learning_rate": 2.9646333333333334e-05, "loss": 0.0067, "step": 11064 }, { "epoch": 20.41551246537396, "grad_norm": 0.277649849653244, "learning_rate": 2.9646e-05, 
"loss": 0.0089, "step": 11065 }, { "epoch": 20.41735918744229, "grad_norm": 0.2441006600856781, "learning_rate": 2.9645666666666666e-05, "loss": 0.0083, "step": 11066 }, { "epoch": 20.41920590951062, "grad_norm": 0.17656227946281433, "learning_rate": 2.9645333333333336e-05, "loss": 0.0067, "step": 11067 }, { "epoch": 20.42105263157895, "grad_norm": 0.18494001030921936, "learning_rate": 2.9644999999999998e-05, "loss": 0.0085, "step": 11068 }, { "epoch": 20.422899353647278, "grad_norm": 0.27652615308761597, "learning_rate": 2.9644666666666668e-05, "loss": 0.0077, "step": 11069 }, { "epoch": 20.424746075715603, "grad_norm": 0.2581216096878052, "learning_rate": 2.9644333333333337e-05, "loss": 0.0068, "step": 11070 }, { "epoch": 20.426592797783933, "grad_norm": 0.2612696588039398, "learning_rate": 2.9644e-05, "loss": 0.0083, "step": 11071 }, { "epoch": 20.42843951985226, "grad_norm": 0.07239628583192825, "learning_rate": 2.964366666666667e-05, "loss": 0.002, "step": 11072 }, { "epoch": 20.43028624192059, "grad_norm": 0.29392409324645996, "learning_rate": 2.9643333333333335e-05, "loss": 0.0112, "step": 11073 }, { "epoch": 20.43213296398892, "grad_norm": 0.5558052062988281, "learning_rate": 2.9643e-05, "loss": 0.0073, "step": 11074 }, { "epoch": 20.43397968605725, "grad_norm": 0.23736676573753357, "learning_rate": 2.9642666666666667e-05, "loss": 0.0074, "step": 11075 }, { "epoch": 20.43582640812558, "grad_norm": 0.29687535762786865, "learning_rate": 2.9642333333333336e-05, "loss": 0.0087, "step": 11076 }, { "epoch": 20.437673130193907, "grad_norm": 0.13251857459545135, "learning_rate": 2.9642000000000002e-05, "loss": 0.0049, "step": 11077 }, { "epoch": 20.439519852262233, "grad_norm": 0.23094314336776733, "learning_rate": 2.9641666666666668e-05, "loss": 0.0053, "step": 11078 }, { "epoch": 20.441366574330562, "grad_norm": 0.283113956451416, "learning_rate": 2.9641333333333334e-05, "loss": 0.0108, "step": 11079 }, { "epoch": 20.44321329639889, "grad_norm": 
0.32321351766586304, "learning_rate": 2.9641e-05, "loss": 0.0084, "step": 11080 }, { "epoch": 20.44506001846722, "grad_norm": 0.47560450434684753, "learning_rate": 2.964066666666667e-05, "loss": 0.0148, "step": 11081 }, { "epoch": 20.44690674053555, "grad_norm": 0.4224452078342438, "learning_rate": 2.964033333333333e-05, "loss": 0.0163, "step": 11082 }, { "epoch": 20.44875346260388, "grad_norm": 0.216268852353096, "learning_rate": 2.964e-05, "loss": 0.0041, "step": 11083 }, { "epoch": 20.450600184672208, "grad_norm": 0.7119795680046082, "learning_rate": 2.9639666666666667e-05, "loss": 0.0102, "step": 11084 }, { "epoch": 20.452446906740537, "grad_norm": 0.3483209013938904, "learning_rate": 2.9639333333333333e-05, "loss": 0.0078, "step": 11085 }, { "epoch": 20.454293628808863, "grad_norm": 0.3994562327861786, "learning_rate": 2.9639000000000002e-05, "loss": 0.0054, "step": 11086 }, { "epoch": 20.45614035087719, "grad_norm": 0.2146139144897461, "learning_rate": 2.9638666666666668e-05, "loss": 0.0079, "step": 11087 }, { "epoch": 20.45798707294552, "grad_norm": 0.3482670783996582, "learning_rate": 2.9638333333333334e-05, "loss": 0.007, "step": 11088 }, { "epoch": 20.45983379501385, "grad_norm": 0.20434588193893433, "learning_rate": 2.9638e-05, "loss": 0.0048, "step": 11089 }, { "epoch": 20.46168051708218, "grad_norm": 0.137790709733963, "learning_rate": 2.963766666666667e-05, "loss": 0.0045, "step": 11090 }, { "epoch": 20.46352723915051, "grad_norm": 0.6156465411186218, "learning_rate": 2.963733333333333e-05, "loss": 0.1216, "step": 11091 }, { "epoch": 20.465373961218837, "grad_norm": 0.4434069097042084, "learning_rate": 2.9637e-05, "loss": 0.0953, "step": 11092 }, { "epoch": 20.467220683287167, "grad_norm": 0.4450667202472687, "learning_rate": 2.9636666666666667e-05, "loss": 0.0877, "step": 11093 }, { "epoch": 20.469067405355496, "grad_norm": 0.4277558922767639, "learning_rate": 2.9636333333333333e-05, "loss": 0.0637, "step": 11094 }, { "epoch": 20.47091412742382, 
"grad_norm": 0.5416181087493896, "learning_rate": 2.9636000000000002e-05, "loss": 0.102, "step": 11095 }, { "epoch": 20.47276084949215, "grad_norm": 0.5285162329673767, "learning_rate": 2.9635666666666668e-05, "loss": 0.0814, "step": 11096 }, { "epoch": 20.47460757156048, "grad_norm": 0.4584277868270874, "learning_rate": 2.9635333333333334e-05, "loss": 0.0593, "step": 11097 }, { "epoch": 20.47645429362881, "grad_norm": 0.3677422106266022, "learning_rate": 2.9635e-05, "loss": 0.0497, "step": 11098 }, { "epoch": 20.478301015697138, "grad_norm": 0.4277951717376709, "learning_rate": 2.963466666666667e-05, "loss": 0.0475, "step": 11099 }, { "epoch": 20.480147737765467, "grad_norm": 0.31714293360710144, "learning_rate": 2.9634333333333332e-05, "loss": 0.0422, "step": 11100 }, { "epoch": 20.481994459833796, "grad_norm": 0.26784515380859375, "learning_rate": 2.9634e-05, "loss": 0.0283, "step": 11101 }, { "epoch": 20.483841181902125, "grad_norm": 0.44170352816581726, "learning_rate": 2.963366666666667e-05, "loss": 0.0497, "step": 11102 }, { "epoch": 20.48568790397045, "grad_norm": 1.118957757949829, "learning_rate": 2.9633333333333333e-05, "loss": 0.1057, "step": 11103 }, { "epoch": 20.48753462603878, "grad_norm": 0.6414685845375061, "learning_rate": 2.9633000000000002e-05, "loss": 0.0244, "step": 11104 }, { "epoch": 20.48938134810711, "grad_norm": 0.27597081661224365, "learning_rate": 2.963266666666667e-05, "loss": 0.0329, "step": 11105 }, { "epoch": 20.49122807017544, "grad_norm": 0.27471840381622314, "learning_rate": 2.9632333333333334e-05, "loss": 0.0144, "step": 11106 }, { "epoch": 20.493074792243767, "grad_norm": 0.3149142265319824, "learning_rate": 2.9632e-05, "loss": 0.0455, "step": 11107 }, { "epoch": 20.494921514312097, "grad_norm": 0.19767074286937714, "learning_rate": 2.9631666666666666e-05, "loss": 0.0082, "step": 11108 }, { "epoch": 20.496768236380426, "grad_norm": 0.21706773340702057, "learning_rate": 2.9631333333333332e-05, "loss": 0.0102, "step": 11109 }, { 
"epoch": 20.498614958448755, "grad_norm": 0.2238224446773529, "learning_rate": 2.9631e-05, "loss": 0.0078, "step": 11110 }, { "epoch": 20.50046168051708, "grad_norm": 0.30950242280960083, "learning_rate": 2.9630666666666667e-05, "loss": 0.0295, "step": 11111 }, { "epoch": 20.50230840258541, "grad_norm": 0.35704588890075684, "learning_rate": 2.9630333333333333e-05, "loss": 0.0102, "step": 11112 }, { "epoch": 20.50415512465374, "grad_norm": 0.16436102986335754, "learning_rate": 2.9630000000000003e-05, "loss": 0.0106, "step": 11113 }, { "epoch": 20.506001846722068, "grad_norm": 0.1160278245806694, "learning_rate": 2.9629666666666665e-05, "loss": 0.0083, "step": 11114 }, { "epoch": 20.507848568790397, "grad_norm": 0.15938252210617065, "learning_rate": 2.9629333333333334e-05, "loss": 0.0056, "step": 11115 }, { "epoch": 20.509695290858726, "grad_norm": 0.1849542111158371, "learning_rate": 2.9629e-05, "loss": 0.0063, "step": 11116 }, { "epoch": 20.511542012927055, "grad_norm": 0.20571953058242798, "learning_rate": 2.9628666666666666e-05, "loss": 0.005, "step": 11117 }, { "epoch": 20.513388734995385, "grad_norm": 0.361489474773407, "learning_rate": 2.9628333333333332e-05, "loss": 0.014, "step": 11118 }, { "epoch": 20.51523545706371, "grad_norm": 0.37601128220558167, "learning_rate": 2.9628e-05, "loss": 0.0427, "step": 11119 }, { "epoch": 20.51708217913204, "grad_norm": 0.3030451536178589, "learning_rate": 2.9627666666666668e-05, "loss": 0.0216, "step": 11120 }, { "epoch": 20.51892890120037, "grad_norm": 0.20974400639533997, "learning_rate": 2.9627333333333333e-05, "loss": 0.0072, "step": 11121 }, { "epoch": 20.520775623268698, "grad_norm": 0.47172707319259644, "learning_rate": 2.9627000000000003e-05, "loss": 0.0201, "step": 11122 }, { "epoch": 20.522622345337027, "grad_norm": 0.4215916097164154, "learning_rate": 2.9626666666666665e-05, "loss": 0.0121, "step": 11123 }, { "epoch": 20.524469067405356, "grad_norm": 0.27536407113075256, "learning_rate": 2.9626333333333335e-05, 
"loss": 0.0067, "step": 11124 }, { "epoch": 20.526315789473685, "grad_norm": 0.1958393007516861, "learning_rate": 2.9626e-05, "loss": 0.0049, "step": 11125 }, { "epoch": 20.528162511542014, "grad_norm": 0.35039401054382324, "learning_rate": 2.9625666666666667e-05, "loss": 0.0093, "step": 11126 }, { "epoch": 20.530009233610343, "grad_norm": 0.32164257764816284, "learning_rate": 2.9625333333333336e-05, "loss": 0.011, "step": 11127 }, { "epoch": 20.53185595567867, "grad_norm": 0.4968523681163788, "learning_rate": 2.9625000000000002e-05, "loss": 0.0134, "step": 11128 }, { "epoch": 20.533702677746998, "grad_norm": 0.21272744238376617, "learning_rate": 2.9624666666666668e-05, "loss": 0.0042, "step": 11129 }, { "epoch": 20.535549399815327, "grad_norm": 0.5690340995788574, "learning_rate": 2.9624333333333334e-05, "loss": 0.0095, "step": 11130 }, { "epoch": 20.537396121883656, "grad_norm": 0.2647596597671509, "learning_rate": 2.9624000000000003e-05, "loss": 0.0067, "step": 11131 }, { "epoch": 20.539242843951985, "grad_norm": 0.33502963185310364, "learning_rate": 2.9623666666666666e-05, "loss": 0.0091, "step": 11132 }, { "epoch": 20.541089566020315, "grad_norm": 0.4991375207901001, "learning_rate": 2.9623333333333335e-05, "loss": 0.0105, "step": 11133 }, { "epoch": 20.542936288088644, "grad_norm": 0.8269820809364319, "learning_rate": 2.9623e-05, "loss": 0.0098, "step": 11134 }, { "epoch": 20.544783010156973, "grad_norm": 0.13691353797912598, "learning_rate": 2.9622666666666667e-05, "loss": 0.0061, "step": 11135 }, { "epoch": 20.5466297322253, "grad_norm": 0.28916093707084656, "learning_rate": 2.9622333333333336e-05, "loss": 0.0084, "step": 11136 }, { "epoch": 20.548476454293628, "grad_norm": 0.5460681319236755, "learning_rate": 2.9622000000000002e-05, "loss": 0.0335, "step": 11137 }, { "epoch": 20.550323176361957, "grad_norm": 0.7758886814117432, "learning_rate": 2.9621666666666668e-05, "loss": 0.0142, "step": 11138 }, { "epoch": 20.552169898430286, "grad_norm": 
0.2703908383846283, "learning_rate": 2.9621333333333334e-05, "loss": 0.0045, "step": 11139 }, { "epoch": 20.554016620498615, "grad_norm": 0.3861091434955597, "learning_rate": 2.9621e-05, "loss": 0.0199, "step": 11140 }, { "epoch": 20.555863342566944, "grad_norm": 0.5069757699966431, "learning_rate": 2.9620666666666666e-05, "loss": 0.1421, "step": 11141 }, { "epoch": 20.557710064635273, "grad_norm": 0.5670900344848633, "learning_rate": 2.9620333333333335e-05, "loss": 0.1451, "step": 11142 }, { "epoch": 20.559556786703602, "grad_norm": 0.7019637823104858, "learning_rate": 2.9619999999999998e-05, "loss": 0.1063, "step": 11143 }, { "epoch": 20.56140350877193, "grad_norm": 0.49798068404197693, "learning_rate": 2.9619666666666667e-05, "loss": 0.0769, "step": 11144 }, { "epoch": 20.563250230840257, "grad_norm": 0.5604007840156555, "learning_rate": 2.9619333333333336e-05, "loss": 0.1241, "step": 11145 }, { "epoch": 20.565096952908586, "grad_norm": 0.4972505271434784, "learning_rate": 2.9619e-05, "loss": 0.0856, "step": 11146 }, { "epoch": 20.566943674976915, "grad_norm": 0.470397412776947, "learning_rate": 2.9618666666666668e-05, "loss": 0.0645, "step": 11147 }, { "epoch": 20.568790397045245, "grad_norm": 0.5640853643417358, "learning_rate": 2.9618333333333334e-05, "loss": 0.0911, "step": 11148 }, { "epoch": 20.570637119113574, "grad_norm": 0.4170297682285309, "learning_rate": 2.9618e-05, "loss": 0.0676, "step": 11149 }, { "epoch": 20.572483841181903, "grad_norm": 0.3935358226299286, "learning_rate": 2.9617666666666666e-05, "loss": 0.0803, "step": 11150 }, { "epoch": 20.574330563250232, "grad_norm": 0.3682790696620941, "learning_rate": 2.9617333333333335e-05, "loss": 0.0415, "step": 11151 }, { "epoch": 20.57617728531856, "grad_norm": 0.28599610924720764, "learning_rate": 2.9617e-05, "loss": 0.0252, "step": 11152 }, { "epoch": 20.578024007386887, "grad_norm": 0.31602001190185547, "learning_rate": 2.9616666666666667e-05, "loss": 0.0568, "step": 11153 }, { "epoch": 
20.579870729455216, "grad_norm": 0.45824578404426575, "learning_rate": 2.9616333333333336e-05, "loss": 0.0582, "step": 11154 }, { "epoch": 20.581717451523545, "grad_norm": 0.35455620288848877, "learning_rate": 2.9616e-05, "loss": 0.0232, "step": 11155 }, { "epoch": 20.583564173591874, "grad_norm": 0.247396320104599, "learning_rate": 2.961566666666667e-05, "loss": 0.0116, "step": 11156 }, { "epoch": 20.585410895660203, "grad_norm": 0.17564378678798676, "learning_rate": 2.9615333333333334e-05, "loss": 0.0103, "step": 11157 }, { "epoch": 20.587257617728532, "grad_norm": 0.5125971436500549, "learning_rate": 2.9615e-05, "loss": 0.0322, "step": 11158 }, { "epoch": 20.58910433979686, "grad_norm": 0.18899650871753693, "learning_rate": 2.9614666666666666e-05, "loss": 0.0117, "step": 11159 }, { "epoch": 20.59095106186519, "grad_norm": 0.35293349623680115, "learning_rate": 2.9614333333333335e-05, "loss": 0.0161, "step": 11160 }, { "epoch": 20.592797783933516, "grad_norm": 0.15498006343841553, "learning_rate": 2.9614e-05, "loss": 0.0053, "step": 11161 }, { "epoch": 20.594644506001845, "grad_norm": 0.24944724142551422, "learning_rate": 2.9613666666666667e-05, "loss": 0.0109, "step": 11162 }, { "epoch": 20.596491228070175, "grad_norm": 0.2556169033050537, "learning_rate": 2.9613333333333337e-05, "loss": 0.0103, "step": 11163 }, { "epoch": 20.598337950138504, "grad_norm": 0.72690749168396, "learning_rate": 2.9613e-05, "loss": 0.0188, "step": 11164 }, { "epoch": 20.600184672206833, "grad_norm": 0.2584100663661957, "learning_rate": 2.961266666666667e-05, "loss": 0.0133, "step": 11165 }, { "epoch": 20.602031394275162, "grad_norm": 0.2549022436141968, "learning_rate": 2.9612333333333334e-05, "loss": 0.0111, "step": 11166 }, { "epoch": 20.60387811634349, "grad_norm": 0.6710214018821716, "learning_rate": 2.9612e-05, "loss": 0.0151, "step": 11167 }, { "epoch": 20.60572483841182, "grad_norm": 0.43617209792137146, "learning_rate": 2.9611666666666666e-05, "loss": 0.0092, "step": 11168 }, { 
"epoch": 20.607571560480146, "grad_norm": 0.245264932513237, "learning_rate": 2.9611333333333332e-05, "loss": 0.0119, "step": 11169 }, { "epoch": 20.609418282548475, "grad_norm": 0.20647655427455902, "learning_rate": 2.9611e-05, "loss": 0.007, "step": 11170 }, { "epoch": 20.611265004616804, "grad_norm": 0.4248480498790741, "learning_rate": 2.9610666666666668e-05, "loss": 0.0096, "step": 11171 }, { "epoch": 20.613111726685133, "grad_norm": 0.36499524116516113, "learning_rate": 2.9610333333333333e-05, "loss": 0.0136, "step": 11172 }, { "epoch": 20.614958448753463, "grad_norm": 0.5641077756881714, "learning_rate": 2.961e-05, "loss": 0.0111, "step": 11173 }, { "epoch": 20.61680517082179, "grad_norm": 0.3565385043621063, "learning_rate": 2.960966666666667e-05, "loss": 0.0135, "step": 11174 }, { "epoch": 20.61865189289012, "grad_norm": 0.5080560445785522, "learning_rate": 2.960933333333333e-05, "loss": 0.0158, "step": 11175 }, { "epoch": 20.62049861495845, "grad_norm": 0.12717655301094055, "learning_rate": 2.9609e-05, "loss": 0.0054, "step": 11176 }, { "epoch": 20.62234533702678, "grad_norm": 0.2625749707221985, "learning_rate": 2.960866666666667e-05, "loss": 0.009, "step": 11177 }, { "epoch": 20.624192059095105, "grad_norm": 0.6564839482307434, "learning_rate": 2.9608333333333332e-05, "loss": 0.0474, "step": 11178 }, { "epoch": 20.626038781163434, "grad_norm": 0.20139797031879425, "learning_rate": 2.9608000000000002e-05, "loss": 0.0083, "step": 11179 }, { "epoch": 20.627885503231763, "grad_norm": 0.8026975989341736, "learning_rate": 2.9607666666666668e-05, "loss": 0.0114, "step": 11180 }, { "epoch": 20.629732225300092, "grad_norm": 0.24883051216602325, "learning_rate": 2.9607333333333334e-05, "loss": 0.006, "step": 11181 }, { "epoch": 20.63157894736842, "grad_norm": 0.33926114439964294, "learning_rate": 2.9607e-05, "loss": 0.0101, "step": 11182 }, { "epoch": 20.63342566943675, "grad_norm": 0.8261702656745911, "learning_rate": 2.960666666666667e-05, "loss": 0.0232, 
"step": 11183 }, { "epoch": 20.63527239150508, "grad_norm": 0.5510255694389343, "learning_rate": 2.960633333333333e-05, "loss": 0.0088, "step": 11184 }, { "epoch": 20.63711911357341, "grad_norm": 0.49745795130729675, "learning_rate": 2.9606e-05, "loss": 0.0144, "step": 11185 }, { "epoch": 20.638965835641734, "grad_norm": 0.30604270100593567, "learning_rate": 2.960566666666667e-05, "loss": 0.0101, "step": 11186 }, { "epoch": 20.640812557710063, "grad_norm": 0.8259574770927429, "learning_rate": 2.9605333333333333e-05, "loss": 0.0117, "step": 11187 }, { "epoch": 20.642659279778393, "grad_norm": 0.3971351683139801, "learning_rate": 2.9605000000000002e-05, "loss": 0.0272, "step": 11188 }, { "epoch": 20.64450600184672, "grad_norm": 0.34049734473228455, "learning_rate": 2.9604666666666668e-05, "loss": 0.0072, "step": 11189 }, { "epoch": 20.64635272391505, "grad_norm": 0.3927757441997528, "learning_rate": 2.9604333333333334e-05, "loss": 0.0136, "step": 11190 }, { "epoch": 20.64819944598338, "grad_norm": 0.7999992370605469, "learning_rate": 2.9604e-05, "loss": 0.1988, "step": 11191 }, { "epoch": 20.65004616805171, "grad_norm": 0.7569670677185059, "learning_rate": 2.960366666666667e-05, "loss": 0.1572, "step": 11192 }, { "epoch": 20.65189289012004, "grad_norm": 0.5596485733985901, "learning_rate": 2.960333333333333e-05, "loss": 0.0999, "step": 11193 }, { "epoch": 20.653739612188367, "grad_norm": 0.45623841881752014, "learning_rate": 2.9603e-05, "loss": 0.0895, "step": 11194 }, { "epoch": 20.655586334256693, "grad_norm": 0.40664881467819214, "learning_rate": 2.960266666666667e-05, "loss": 0.1002, "step": 11195 }, { "epoch": 20.657433056325022, "grad_norm": 0.40975096821784973, "learning_rate": 2.9602333333333333e-05, "loss": 0.0723, "step": 11196 }, { "epoch": 20.65927977839335, "grad_norm": 0.3656075894832611, "learning_rate": 2.9602000000000002e-05, "loss": 0.0678, "step": 11197 }, { "epoch": 20.66112650046168, "grad_norm": 0.35924556851387024, "learning_rate": 
2.9601666666666665e-05, "loss": 0.0438, "step": 11198 }, { "epoch": 20.66297322253001, "grad_norm": 0.3122684061527252, "learning_rate": 2.9601333333333334e-05, "loss": 0.0412, "step": 11199 }, { "epoch": 20.66481994459834, "grad_norm": 0.49023181200027466, "learning_rate": 2.9601e-05, "loss": 0.0986, "step": 11200 }, { "epoch": 20.666666666666668, "grad_norm": 0.3593541979789734, "learning_rate": 2.9600666666666666e-05, "loss": 0.0508, "step": 11201 }, { "epoch": 20.668513388734997, "grad_norm": 0.450988233089447, "learning_rate": 2.9600333333333335e-05, "loss": 0.0432, "step": 11202 }, { "epoch": 20.670360110803323, "grad_norm": 0.44862470030784607, "learning_rate": 2.96e-05, "loss": 0.0232, "step": 11203 }, { "epoch": 20.67220683287165, "grad_norm": 0.22612938284873962, "learning_rate": 2.9599666666666667e-05, "loss": 0.0186, "step": 11204 }, { "epoch": 20.67405355493998, "grad_norm": 0.30688098073005676, "learning_rate": 2.9599333333333333e-05, "loss": 0.0207, "step": 11205 }, { "epoch": 20.67590027700831, "grad_norm": 0.21418403089046478, "learning_rate": 2.9599000000000002e-05, "loss": 0.0097, "step": 11206 }, { "epoch": 20.67774699907664, "grad_norm": 0.4343891143798828, "learning_rate": 2.9598666666666665e-05, "loss": 0.0291, "step": 11207 }, { "epoch": 20.67959372114497, "grad_norm": 0.7051688432693481, "learning_rate": 2.9598333333333334e-05, "loss": 0.0312, "step": 11208 }, { "epoch": 20.681440443213297, "grad_norm": 0.2645828127861023, "learning_rate": 2.9598e-05, "loss": 0.0093, "step": 11209 }, { "epoch": 20.683287165281627, "grad_norm": 0.32652685046195984, "learning_rate": 2.9597666666666666e-05, "loss": 0.012, "step": 11210 }, { "epoch": 20.685133887349952, "grad_norm": 0.18169867992401123, "learning_rate": 2.9597333333333335e-05, "loss": 0.0102, "step": 11211 }, { "epoch": 20.68698060941828, "grad_norm": 0.28606170415878296, "learning_rate": 2.9597e-05, "loss": 0.0186, "step": 11212 }, { "epoch": 20.68882733148661, "grad_norm": 0.4741019606590271, 
"learning_rate": 2.9596666666666667e-05, "loss": 0.0109, "step": 11213 }, { "epoch": 20.69067405355494, "grad_norm": 0.380709171295166, "learning_rate": 2.9596333333333333e-05, "loss": 0.0111, "step": 11214 }, { "epoch": 20.69252077562327, "grad_norm": 0.2992388606071472, "learning_rate": 2.9596000000000003e-05, "loss": 0.0114, "step": 11215 }, { "epoch": 20.694367497691598, "grad_norm": 0.4169728755950928, "learning_rate": 2.9595666666666665e-05, "loss": 0.011, "step": 11216 }, { "epoch": 20.696214219759927, "grad_norm": 0.8805220127105713, "learning_rate": 2.9595333333333334e-05, "loss": 0.0061, "step": 11217 }, { "epoch": 20.698060941828256, "grad_norm": 0.12588036060333252, "learning_rate": 2.9595e-05, "loss": 0.0044, "step": 11218 }, { "epoch": 20.69990766389658, "grad_norm": 0.5865361094474792, "learning_rate": 2.9594666666666666e-05, "loss": 0.0191, "step": 11219 }, { "epoch": 20.70175438596491, "grad_norm": 0.1972983181476593, "learning_rate": 2.9594333333333336e-05, "loss": 0.0075, "step": 11220 }, { "epoch": 20.70360110803324, "grad_norm": 0.15591812133789062, "learning_rate": 2.9594e-05, "loss": 0.004, "step": 11221 }, { "epoch": 20.70544783010157, "grad_norm": 0.29317209124565125, "learning_rate": 2.9593666666666668e-05, "loss": 0.0115, "step": 11222 }, { "epoch": 20.7072945521699, "grad_norm": 0.4665067195892334, "learning_rate": 2.9593333333333333e-05, "loss": 0.015, "step": 11223 }, { "epoch": 20.709141274238227, "grad_norm": 0.48972606658935547, "learning_rate": 2.9593000000000003e-05, "loss": 0.0102, "step": 11224 }, { "epoch": 20.710987996306557, "grad_norm": 0.2213616967201233, "learning_rate": 2.9592666666666665e-05, "loss": 0.0093, "step": 11225 }, { "epoch": 20.712834718374886, "grad_norm": 0.6633093357086182, "learning_rate": 2.9592333333333335e-05, "loss": 0.0152, "step": 11226 }, { "epoch": 20.714681440443215, "grad_norm": 0.8084264993667603, "learning_rate": 2.9592000000000004e-05, "loss": 0.0149, "step": 11227 }, { "epoch": 
20.71652816251154, "grad_norm": 0.2868703007698059, "learning_rate": 2.9591666666666667e-05, "loss": 0.0089, "step": 11228 }, { "epoch": 20.71837488457987, "grad_norm": 0.3528291881084442, "learning_rate": 2.9591333333333336e-05, "loss": 0.0092, "step": 11229 }, { "epoch": 20.7202216066482, "grad_norm": 0.26255181431770325, "learning_rate": 2.9591e-05, "loss": 0.0113, "step": 11230 }, { "epoch": 20.722068328716528, "grad_norm": 0.14820496737957, "learning_rate": 2.9590666666666668e-05, "loss": 0.0047, "step": 11231 }, { "epoch": 20.723915050784857, "grad_norm": 0.5360812544822693, "learning_rate": 2.9590333333333334e-05, "loss": 0.012, "step": 11232 }, { "epoch": 20.725761772853186, "grad_norm": 0.18604981899261475, "learning_rate": 2.959e-05, "loss": 0.0051, "step": 11233 }, { "epoch": 20.727608494921515, "grad_norm": 0.4018900692462921, "learning_rate": 2.9589666666666666e-05, "loss": 0.0111, "step": 11234 }, { "epoch": 20.729455216989845, "grad_norm": 0.818670392036438, "learning_rate": 2.9589333333333335e-05, "loss": 0.0166, "step": 11235 }, { "epoch": 20.73130193905817, "grad_norm": 0.243108868598938, "learning_rate": 2.9589e-05, "loss": 0.0046, "step": 11236 }, { "epoch": 20.7331486611265, "grad_norm": 0.13450288772583008, "learning_rate": 2.9588666666666667e-05, "loss": 0.0051, "step": 11237 }, { "epoch": 20.73499538319483, "grad_norm": 0.40568941831588745, "learning_rate": 2.9588333333333336e-05, "loss": 0.0113, "step": 11238 }, { "epoch": 20.736842105263158, "grad_norm": 0.4287243187427521, "learning_rate": 2.9588e-05, "loss": 0.013, "step": 11239 }, { "epoch": 20.738688827331487, "grad_norm": 0.4001462459564209, "learning_rate": 2.9587666666666668e-05, "loss": 0.0114, "step": 11240 }, { "epoch": 20.740535549399816, "grad_norm": 0.7578105926513672, "learning_rate": 2.9587333333333334e-05, "loss": 0.2375, "step": 11241 }, { "epoch": 20.742382271468145, "grad_norm": 0.43286678194999695, "learning_rate": 2.9587e-05, "loss": 0.1031, "step": 11242 }, { "epoch": 
20.744228993536474, "grad_norm": 0.4050258994102478, "learning_rate": 2.9586666666666666e-05, "loss": 0.075, "step": 11243 }, { "epoch": 20.746075715604803, "grad_norm": 0.42576155066490173, "learning_rate": 2.9586333333333335e-05, "loss": 0.093, "step": 11244 }, { "epoch": 20.74792243767313, "grad_norm": 0.8448416590690613, "learning_rate": 2.9586e-05, "loss": 0.1502, "step": 11245 }, { "epoch": 20.749769159741458, "grad_norm": 0.46882346272468567, "learning_rate": 2.9585666666666667e-05, "loss": 0.0749, "step": 11246 }, { "epoch": 20.751615881809787, "grad_norm": 0.4115670323371887, "learning_rate": 2.9585333333333336e-05, "loss": 0.0778, "step": 11247 }, { "epoch": 20.753462603878116, "grad_norm": 0.49006929993629456, "learning_rate": 2.9585e-05, "loss": 0.064, "step": 11248 }, { "epoch": 20.755309325946445, "grad_norm": 0.3789594769477844, "learning_rate": 2.9584666666666668e-05, "loss": 0.0396, "step": 11249 }, { "epoch": 20.757156048014775, "grad_norm": 0.5852547883987427, "learning_rate": 2.9584333333333334e-05, "loss": 0.043, "step": 11250 }, { "epoch": 20.759002770083104, "grad_norm": 0.4392510950565338, "learning_rate": 2.9584e-05, "loss": 0.0386, "step": 11251 }, { "epoch": 20.760849492151433, "grad_norm": 0.338418573141098, "learning_rate": 2.958366666666667e-05, "loss": 0.0723, "step": 11252 }, { "epoch": 20.76269621421976, "grad_norm": 0.2645232379436493, "learning_rate": 2.9583333333333335e-05, "loss": 0.0524, "step": 11253 }, { "epoch": 20.764542936288088, "grad_norm": 0.24157041311264038, "learning_rate": 2.9583e-05, "loss": 0.0259, "step": 11254 }, { "epoch": 20.766389658356417, "grad_norm": 0.38394519686698914, "learning_rate": 2.9582666666666667e-05, "loss": 0.0432, "step": 11255 }, { "epoch": 20.768236380424746, "grad_norm": 0.3139944076538086, "learning_rate": 2.9582333333333336e-05, "loss": 0.0203, "step": 11256 }, { "epoch": 20.770083102493075, "grad_norm": 0.46563613414764404, "learning_rate": 2.9582e-05, "loss": 0.031, "step": 11257 }, { 
"epoch": 20.771929824561404, "grad_norm": 0.30651748180389404, "learning_rate": 2.958166666666667e-05, "loss": 0.0157, "step": 11258 }, { "epoch": 20.773776546629733, "grad_norm": 0.22431708872318268, "learning_rate": 2.958133333333333e-05, "loss": 0.0104, "step": 11259 }, { "epoch": 20.775623268698062, "grad_norm": 0.5509371161460876, "learning_rate": 2.9581e-05, "loss": 0.0187, "step": 11260 }, { "epoch": 20.777469990766388, "grad_norm": 0.22594115138053894, "learning_rate": 2.958066666666667e-05, "loss": 0.0075, "step": 11261 }, { "epoch": 20.779316712834717, "grad_norm": 0.19666936993598938, "learning_rate": 2.9580333333333332e-05, "loss": 0.0088, "step": 11262 }, { "epoch": 20.781163434903046, "grad_norm": 0.22689498960971832, "learning_rate": 2.958e-05, "loss": 0.0057, "step": 11263 }, { "epoch": 20.783010156971375, "grad_norm": 0.394515722990036, "learning_rate": 2.9579666666666667e-05, "loss": 0.0163, "step": 11264 }, { "epoch": 20.784856879039705, "grad_norm": 0.16962333023548126, "learning_rate": 2.9579333333333333e-05, "loss": 0.0056, "step": 11265 }, { "epoch": 20.786703601108034, "grad_norm": 0.20703470706939697, "learning_rate": 2.9579e-05, "loss": 0.016, "step": 11266 }, { "epoch": 20.788550323176363, "grad_norm": 0.29677820205688477, "learning_rate": 2.957866666666667e-05, "loss": 0.0085, "step": 11267 }, { "epoch": 20.790397045244692, "grad_norm": 0.19197024405002594, "learning_rate": 2.9578333333333334e-05, "loss": 0.0052, "step": 11268 }, { "epoch": 20.792243767313018, "grad_norm": 0.4533921182155609, "learning_rate": 2.9578e-05, "loss": 0.0119, "step": 11269 }, { "epoch": 20.794090489381347, "grad_norm": 0.22189201414585114, "learning_rate": 2.957766666666667e-05, "loss": 0.0053, "step": 11270 }, { "epoch": 20.795937211449676, "grad_norm": 0.5494603514671326, "learning_rate": 2.9577333333333332e-05, "loss": 0.0431, "step": 11271 }, { "epoch": 20.797783933518005, "grad_norm": 0.3049589991569519, "learning_rate": 2.9577e-05, "loss": 0.0086, 
"step": 11272 }, { "epoch": 20.799630655586334, "grad_norm": 0.23531177639961243, "learning_rate": 2.9576666666666668e-05, "loss": 0.0098, "step": 11273 }, { "epoch": 20.801477377654663, "grad_norm": 0.24381548166275024, "learning_rate": 2.9576333333333333e-05, "loss": 0.0053, "step": 11274 }, { "epoch": 20.803324099722992, "grad_norm": 2.4563019275665283, "learning_rate": 2.9576e-05, "loss": 0.0231, "step": 11275 }, { "epoch": 20.80517082179132, "grad_norm": 0.33990874886512756, "learning_rate": 2.957566666666667e-05, "loss": 0.0053, "step": 11276 }, { "epoch": 20.80701754385965, "grad_norm": 0.35326576232910156, "learning_rate": 2.9575333333333335e-05, "loss": 0.0083, "step": 11277 }, { "epoch": 20.808864265927976, "grad_norm": 0.34734803438186646, "learning_rate": 2.9575e-05, "loss": 0.0132, "step": 11278 }, { "epoch": 20.810710987996305, "grad_norm": 0.8471444845199585, "learning_rate": 2.957466666666667e-05, "loss": 0.0208, "step": 11279 }, { "epoch": 20.812557710064635, "grad_norm": 0.4880143105983734, "learning_rate": 2.9574333333333332e-05, "loss": 0.0061, "step": 11280 }, { "epoch": 20.814404432132964, "grad_norm": 0.23279136419296265, "learning_rate": 2.9574000000000002e-05, "loss": 0.006, "step": 11281 }, { "epoch": 20.816251154201293, "grad_norm": 0.19613172113895416, "learning_rate": 2.9573666666666668e-05, "loss": 0.0063, "step": 11282 }, { "epoch": 20.818097876269622, "grad_norm": 0.20421376824378967, "learning_rate": 2.9573333333333334e-05, "loss": 0.0047, "step": 11283 }, { "epoch": 20.81994459833795, "grad_norm": 0.3376994729042053, "learning_rate": 2.9573e-05, "loss": 0.0045, "step": 11284 }, { "epoch": 20.82179132040628, "grad_norm": 0.39231762290000916, "learning_rate": 2.957266666666667e-05, "loss": 0.0188, "step": 11285 }, { "epoch": 20.823638042474606, "grad_norm": 0.10870233923196793, "learning_rate": 2.9572333333333335e-05, "loss": 0.0026, "step": 11286 }, { "epoch": 20.825484764542935, "grad_norm": 0.697699248790741, "learning_rate": 
2.9572e-05, "loss": 0.0022, "step": 11287 }, { "epoch": 20.827331486611264, "grad_norm": 0.3991001844406128, "learning_rate": 2.957166666666667e-05, "loss": 0.0127, "step": 11288 }, { "epoch": 20.829178208679593, "grad_norm": 0.9220645427703857, "learning_rate": 2.9571333333333333e-05, "loss": 0.0284, "step": 11289 }, { "epoch": 20.831024930747922, "grad_norm": 0.41163426637649536, "learning_rate": 2.9571000000000002e-05, "loss": 0.0243, "step": 11290 }, { "epoch": 20.83287165281625, "grad_norm": 0.9080430865287781, "learning_rate": 2.9570666666666665e-05, "loss": 0.1517, "step": 11291 }, { "epoch": 20.83471837488458, "grad_norm": 0.43430566787719727, "learning_rate": 2.9570333333333334e-05, "loss": 0.1335, "step": 11292 }, { "epoch": 20.83656509695291, "grad_norm": 0.5547497272491455, "learning_rate": 2.957e-05, "loss": 0.0931, "step": 11293 }, { "epoch": 20.83841181902124, "grad_norm": 0.5292896032333374, "learning_rate": 2.9569666666666666e-05, "loss": 0.1158, "step": 11294 }, { "epoch": 20.840258541089565, "grad_norm": 0.4992031753063202, "learning_rate": 2.9569333333333335e-05, "loss": 0.099, "step": 11295 }, { "epoch": 20.842105263157894, "grad_norm": 0.4766106903553009, "learning_rate": 2.9569e-05, "loss": 0.0715, "step": 11296 }, { "epoch": 20.843951985226223, "grad_norm": 0.5213163495063782, "learning_rate": 2.9568666666666667e-05, "loss": 0.0692, "step": 11297 }, { "epoch": 20.845798707294552, "grad_norm": 0.5166041851043701, "learning_rate": 2.9568333333333333e-05, "loss": 0.0706, "step": 11298 }, { "epoch": 20.84764542936288, "grad_norm": 0.3912571966648102, "learning_rate": 2.9568000000000002e-05, "loss": 0.0737, "step": 11299 }, { "epoch": 20.84949215143121, "grad_norm": 0.47118571400642395, "learning_rate": 2.9567666666666665e-05, "loss": 0.0371, "step": 11300 }, { "epoch": 20.85133887349954, "grad_norm": 0.49245890974998474, "learning_rate": 2.9567333333333334e-05, "loss": 0.0704, "step": 11301 }, { "epoch": 20.85318559556787, "grad_norm": 
0.33564913272857666, "learning_rate": 2.9567000000000003e-05, "loss": 0.0356, "step": 11302 }, { "epoch": 20.855032317636194, "grad_norm": 0.36780816316604614, "learning_rate": 2.9566666666666666e-05, "loss": 0.0806, "step": 11303 }, { "epoch": 20.856879039704523, "grad_norm": 0.6674977540969849, "learning_rate": 2.9566333333333335e-05, "loss": 0.0328, "step": 11304 }, { "epoch": 20.858725761772853, "grad_norm": 0.497979998588562, "learning_rate": 2.9566e-05, "loss": 0.0261, "step": 11305 }, { "epoch": 20.86057248384118, "grad_norm": 0.23606769740581512, "learning_rate": 2.9565666666666667e-05, "loss": 0.0222, "step": 11306 }, { "epoch": 20.86241920590951, "grad_norm": 0.2154080867767334, "learning_rate": 2.9565333333333333e-05, "loss": 0.0083, "step": 11307 }, { "epoch": 20.86426592797784, "grad_norm": 0.23044618964195251, "learning_rate": 2.9565000000000002e-05, "loss": 0.0141, "step": 11308 }, { "epoch": 20.86611265004617, "grad_norm": 0.7383080124855042, "learning_rate": 2.9564666666666665e-05, "loss": 0.0095, "step": 11309 }, { "epoch": 20.8679593721145, "grad_norm": 0.21569529175758362, "learning_rate": 2.9564333333333334e-05, "loss": 0.0129, "step": 11310 }, { "epoch": 20.869806094182824, "grad_norm": 0.2047816663980484, "learning_rate": 2.9564000000000004e-05, "loss": 0.0076, "step": 11311 }, { "epoch": 20.871652816251153, "grad_norm": 0.3197352886199951, "learning_rate": 2.9563666666666666e-05, "loss": 0.0184, "step": 11312 }, { "epoch": 20.873499538319482, "grad_norm": 0.3038652539253235, "learning_rate": 2.9563333333333335e-05, "loss": 0.0109, "step": 11313 }, { "epoch": 20.87534626038781, "grad_norm": 0.6552397608757019, "learning_rate": 2.9563e-05, "loss": 0.0222, "step": 11314 }, { "epoch": 20.87719298245614, "grad_norm": 0.5822405815124512, "learning_rate": 2.9562666666666667e-05, "loss": 0.0196, "step": 11315 }, { "epoch": 20.87903970452447, "grad_norm": 0.12892888486385345, "learning_rate": 2.9562333333333333e-05, "loss": 0.0051, "step": 11316 }, { 
"epoch": 20.8808864265928, "grad_norm": 0.1823236346244812, "learning_rate": 2.9562000000000003e-05, "loss": 0.0082, "step": 11317 }, { "epoch": 20.882733148661128, "grad_norm": 0.17489029467105865, "learning_rate": 2.956166666666667e-05, "loss": 0.0063, "step": 11318 }, { "epoch": 20.884579870729453, "grad_norm": 0.24932153522968292, "learning_rate": 2.9561333333333334e-05, "loss": 0.0132, "step": 11319 }, { "epoch": 20.886426592797783, "grad_norm": 0.1961432546377182, "learning_rate": 2.9561e-05, "loss": 0.0066, "step": 11320 }, { "epoch": 20.88827331486611, "grad_norm": 0.426115483045578, "learning_rate": 2.9560666666666666e-05, "loss": 0.0112, "step": 11321 }, { "epoch": 20.89012003693444, "grad_norm": 0.29435256123542786, "learning_rate": 2.9560333333333336e-05, "loss": 0.0102, "step": 11322 }, { "epoch": 20.89196675900277, "grad_norm": 0.31088516116142273, "learning_rate": 2.9559999999999998e-05, "loss": 0.0079, "step": 11323 }, { "epoch": 20.8938134810711, "grad_norm": 1.155611515045166, "learning_rate": 2.9559666666666668e-05, "loss": 0.0215, "step": 11324 }, { "epoch": 20.89566020313943, "grad_norm": 0.31312328577041626, "learning_rate": 2.9559333333333333e-05, "loss": 0.0121, "step": 11325 }, { "epoch": 20.897506925207757, "grad_norm": 0.27958038449287415, "learning_rate": 2.9559e-05, "loss": 0.0043, "step": 11326 }, { "epoch": 20.899353647276087, "grad_norm": 0.18234357237815857, "learning_rate": 2.955866666666667e-05, "loss": 0.0049, "step": 11327 }, { "epoch": 20.901200369344412, "grad_norm": 0.39806634187698364, "learning_rate": 2.9558333333333335e-05, "loss": 0.0139, "step": 11328 }, { "epoch": 20.90304709141274, "grad_norm": 0.5366166234016418, "learning_rate": 2.9558e-05, "loss": 0.0209, "step": 11329 }, { "epoch": 20.90489381348107, "grad_norm": 0.39424633979797363, "learning_rate": 2.9557666666666667e-05, "loss": 0.0112, "step": 11330 }, { "epoch": 20.9067405355494, "grad_norm": 0.4483903646469116, "learning_rate": 2.9557333333333336e-05, "loss": 
0.0151, "step": 11331 }, { "epoch": 20.90858725761773, "grad_norm": 0.30543461441993713, "learning_rate": 2.9557e-05, "loss": 0.0119, "step": 11332 }, { "epoch": 20.910433979686058, "grad_norm": 0.24922345578670502, "learning_rate": 2.9556666666666668e-05, "loss": 0.0057, "step": 11333 }, { "epoch": 20.912280701754387, "grad_norm": 0.1164715439081192, "learning_rate": 2.9556333333333334e-05, "loss": 0.0034, "step": 11334 }, { "epoch": 20.914127423822716, "grad_norm": 0.1792861968278885, "learning_rate": 2.9556e-05, "loss": 0.004, "step": 11335 }, { "epoch": 20.91597414589104, "grad_norm": 0.5263696908950806, "learning_rate": 2.955566666666667e-05, "loss": 0.0165, "step": 11336 }, { "epoch": 20.91782086795937, "grad_norm": 0.36606866121292114, "learning_rate": 2.9555333333333335e-05, "loss": 0.0095, "step": 11337 }, { "epoch": 20.9196675900277, "grad_norm": 0.31590327620506287, "learning_rate": 2.9555e-05, "loss": 0.0078, "step": 11338 }, { "epoch": 20.92151431209603, "grad_norm": 0.5138630270957947, "learning_rate": 2.9554666666666667e-05, "loss": 0.0203, "step": 11339 }, { "epoch": 20.92336103416436, "grad_norm": 0.23742391169071198, "learning_rate": 2.9554333333333336e-05, "loss": 0.0203, "step": 11340 }, { "epoch": 20.925207756232687, "grad_norm": 0.6078528165817261, "learning_rate": 2.9554e-05, "loss": 0.1362, "step": 11341 }, { "epoch": 20.927054478301017, "grad_norm": 0.7384791970252991, "learning_rate": 2.9553666666666668e-05, "loss": 0.1074, "step": 11342 }, { "epoch": 20.928901200369346, "grad_norm": 0.5082293152809143, "learning_rate": 2.9553333333333334e-05, "loss": 0.1437, "step": 11343 }, { "epoch": 20.930747922437675, "grad_norm": 0.4633669853210449, "learning_rate": 2.9553e-05, "loss": 0.0835, "step": 11344 }, { "epoch": 20.932594644506, "grad_norm": 0.5880398750305176, "learning_rate": 2.955266666666667e-05, "loss": 0.1012, "step": 11345 }, { "epoch": 20.93444136657433, "grad_norm": 0.5600230097770691, "learning_rate": 2.9552333333333335e-05, 
"loss": 0.0635, "step": 11346 }, { "epoch": 20.93628808864266, "grad_norm": 0.5349959135055542, "learning_rate": 2.9552e-05, "loss": 0.078, "step": 11347 }, { "epoch": 20.938134810710988, "grad_norm": 0.6552622318267822, "learning_rate": 2.9551666666666667e-05, "loss": 0.0464, "step": 11348 }, { "epoch": 20.939981532779317, "grad_norm": 1.903660774230957, "learning_rate": 2.9551333333333333e-05, "loss": 0.0537, "step": 11349 }, { "epoch": 20.941828254847646, "grad_norm": 0.5627049803733826, "learning_rate": 2.9551e-05, "loss": 0.0254, "step": 11350 }, { "epoch": 20.943674976915975, "grad_norm": 0.3178408741950989, "learning_rate": 2.9550666666666668e-05, "loss": 0.0695, "step": 11351 }, { "epoch": 20.945521698984304, "grad_norm": 0.32476845383644104, "learning_rate": 2.9550333333333334e-05, "loss": 0.0187, "step": 11352 }, { "epoch": 20.94736842105263, "grad_norm": 0.37900370359420776, "learning_rate": 2.955e-05, "loss": 0.0195, "step": 11353 }, { "epoch": 20.94921514312096, "grad_norm": 0.38062894344329834, "learning_rate": 2.954966666666667e-05, "loss": 0.0125, "step": 11354 }, { "epoch": 20.95106186518929, "grad_norm": 0.26303040981292725, "learning_rate": 2.9549333333333332e-05, "loss": 0.0125, "step": 11355 }, { "epoch": 20.952908587257618, "grad_norm": 0.36702829599380493, "learning_rate": 2.9549e-05, "loss": 0.0298, "step": 11356 }, { "epoch": 20.954755309325947, "grad_norm": 0.2526761591434479, "learning_rate": 2.9548666666666667e-05, "loss": 0.0101, "step": 11357 }, { "epoch": 20.956602031394276, "grad_norm": 0.3408384919166565, "learning_rate": 2.9548333333333333e-05, "loss": 0.0137, "step": 11358 }, { "epoch": 20.958448753462605, "grad_norm": 0.21575458347797394, "learning_rate": 2.9548e-05, "loss": 0.0101, "step": 11359 }, { "epoch": 20.960295475530934, "grad_norm": 0.16458436846733093, "learning_rate": 2.954766666666667e-05, "loss": 0.0086, "step": 11360 }, { "epoch": 20.96214219759926, "grad_norm": 0.4075638949871063, "learning_rate": 
2.9547333333333334e-05, "loss": 0.0236, "step": 11361 }, { "epoch": 20.96398891966759, "grad_norm": 0.21881143748760223, "learning_rate": 2.9547e-05, "loss": 0.0091, "step": 11362 }, { "epoch": 20.965835641735918, "grad_norm": 0.18376675248146057, "learning_rate": 2.954666666666667e-05, "loss": 0.0093, "step": 11363 }, { "epoch": 20.967682363804247, "grad_norm": 0.12338406592607498, "learning_rate": 2.9546333333333332e-05, "loss": 0.0044, "step": 11364 }, { "epoch": 20.969529085872576, "grad_norm": 0.7155386209487915, "learning_rate": 2.9546e-05, "loss": 0.0099, "step": 11365 }, { "epoch": 20.971375807940905, "grad_norm": 0.1896418035030365, "learning_rate": 2.9545666666666667e-05, "loss": 0.0063, "step": 11366 }, { "epoch": 20.973222530009235, "grad_norm": 0.6268427968025208, "learning_rate": 2.9545333333333333e-05, "loss": 0.0114, "step": 11367 }, { "epoch": 20.975069252077564, "grad_norm": 0.25467798113822937, "learning_rate": 2.9545000000000003e-05, "loss": 0.0052, "step": 11368 }, { "epoch": 20.97691597414589, "grad_norm": 0.0892992615699768, "learning_rate": 2.954466666666667e-05, "loss": 0.0033, "step": 11369 }, { "epoch": 20.97876269621422, "grad_norm": 0.5027537941932678, "learning_rate": 2.9544333333333334e-05, "loss": 0.0097, "step": 11370 }, { "epoch": 20.980609418282548, "grad_norm": 0.4184074401855469, "learning_rate": 2.9544e-05, "loss": 0.0202, "step": 11371 }, { "epoch": 20.982456140350877, "grad_norm": 0.17174819111824036, "learning_rate": 2.954366666666667e-05, "loss": 0.0042, "step": 11372 }, { "epoch": 20.984302862419206, "grad_norm": 0.30898192524909973, "learning_rate": 2.9543333333333332e-05, "loss": 0.0131, "step": 11373 }, { "epoch": 20.986149584487535, "grad_norm": 0.3742638826370239, "learning_rate": 2.9543e-05, "loss": 0.0091, "step": 11374 }, { "epoch": 20.987996306555864, "grad_norm": 0.5427600741386414, "learning_rate": 2.9542666666666668e-05, "loss": 0.0128, "step": 11375 }, { "epoch": 20.989843028624193, "grad_norm": 
0.2309504598379135, "learning_rate": 2.9542333333333333e-05, "loss": 0.009, "step": 11376 }, { "epoch": 20.991689750692522, "grad_norm": 0.3140806257724762, "learning_rate": 2.9542000000000003e-05, "loss": 0.0081, "step": 11377 }, { "epoch": 20.993536472760848, "grad_norm": 0.41816213726997375, "learning_rate": 2.954166666666667e-05, "loss": 0.006, "step": 11378 }, { "epoch": 20.995383194829177, "grad_norm": 0.3930540084838867, "learning_rate": 2.9541333333333335e-05, "loss": 0.0079, "step": 11379 }, { "epoch": 20.997229916897506, "grad_norm": 0.3547838628292084, "learning_rate": 2.9541e-05, "loss": 0.0126, "step": 11380 }, { "epoch": 20.999076638965835, "grad_norm": 0.40626728534698486, "learning_rate": 2.9540666666666667e-05, "loss": 0.0176, "step": 11381 }, { "epoch": 21.0, "grad_norm": 0.15839992463588715, "learning_rate": 2.9540333333333332e-05, "loss": 0.0027, "step": 11382 }, { "epoch": 21.00184672206833, "grad_norm": 0.7238561511039734, "learning_rate": 2.9540000000000002e-05, "loss": 0.1525, "step": 11383 }, { "epoch": 21.00369344413666, "grad_norm": 0.5545807480812073, "learning_rate": 2.9539666666666664e-05, "loss": 0.0998, "step": 11384 }, { "epoch": 21.005540166204987, "grad_norm": 0.521634578704834, "learning_rate": 2.9539333333333334e-05, "loss": 0.0941, "step": 11385 }, { "epoch": 21.007386888273317, "grad_norm": 0.44180136919021606, "learning_rate": 2.9539000000000003e-05, "loss": 0.0553, "step": 11386 }, { "epoch": 21.009233610341642, "grad_norm": 0.4523581862449646, "learning_rate": 2.9538666666666666e-05, "loss": 0.0617, "step": 11387 }, { "epoch": 21.01108033240997, "grad_norm": 0.6714923977851868, "learning_rate": 2.9538333333333335e-05, "loss": 0.0931, "step": 11388 }, { "epoch": 21.0129270544783, "grad_norm": 0.6149570345878601, "learning_rate": 2.9538e-05, "loss": 0.054, "step": 11389 }, { "epoch": 21.01477377654663, "grad_norm": 0.6411060690879822, "learning_rate": 2.9537666666666667e-05, "loss": 0.063, "step": 11390 }, { "epoch": 
21.01662049861496, "grad_norm": 0.49264752864837646, "learning_rate": 2.9537333333333333e-05, "loss": 0.0485, "step": 11391 }, { "epoch": 21.018467220683288, "grad_norm": 0.6387502551078796, "learning_rate": 2.9537000000000002e-05, "loss": 0.0811, "step": 11392 }, { "epoch": 21.020313942751617, "grad_norm": 0.6217536926269531, "learning_rate": 2.9536666666666668e-05, "loss": 0.0336, "step": 11393 }, { "epoch": 21.022160664819946, "grad_norm": 0.41189178824424744, "learning_rate": 2.9536333333333334e-05, "loss": 0.0546, "step": 11394 }, { "epoch": 21.02400738688827, "grad_norm": 0.29428115487098694, "learning_rate": 2.9536000000000003e-05, "loss": 0.0751, "step": 11395 }, { "epoch": 21.0258541089566, "grad_norm": 0.4770805537700653, "learning_rate": 2.9535666666666666e-05, "loss": 0.0325, "step": 11396 }, { "epoch": 21.02770083102493, "grad_norm": 0.5486339330673218, "learning_rate": 2.9535333333333335e-05, "loss": 0.0211, "step": 11397 }, { "epoch": 21.02954755309326, "grad_norm": 0.18714967370033264, "learning_rate": 2.9535e-05, "loss": 0.0144, "step": 11398 }, { "epoch": 21.03139427516159, "grad_norm": 0.22030745446681976, "learning_rate": 2.9534666666666667e-05, "loss": 0.0101, "step": 11399 }, { "epoch": 21.033240997229917, "grad_norm": 0.22807270288467407, "learning_rate": 2.9534333333333333e-05, "loss": 0.0138, "step": 11400 }, { "epoch": 21.035087719298247, "grad_norm": 0.3241746723651886, "learning_rate": 2.9534000000000002e-05, "loss": 0.0083, "step": 11401 }, { "epoch": 21.036934441366576, "grad_norm": 0.21546830236911774, "learning_rate": 2.9533666666666668e-05, "loss": 0.0074, "step": 11402 }, { "epoch": 21.0387811634349, "grad_norm": 0.19861456751823425, "learning_rate": 2.9533333333333334e-05, "loss": 0.0111, "step": 11403 }, { "epoch": 21.04062788550323, "grad_norm": 0.39724117517471313, "learning_rate": 2.9533000000000003e-05, "loss": 0.0189, "step": 11404 }, { "epoch": 21.04247460757156, "grad_norm": 0.21916146576404572, "learning_rate": 
2.9532666666666666e-05, "loss": 0.0055, "step": 11405 }, { "epoch": 21.04432132963989, "grad_norm": 0.24507205188274384, "learning_rate": 2.9532333333333335e-05, "loss": 0.0112, "step": 11406 }, { "epoch": 21.046168051708218, "grad_norm": 0.35915157198905945, "learning_rate": 2.9532e-05, "loss": 0.0137, "step": 11407 }, { "epoch": 21.048014773776547, "grad_norm": 0.3480655550956726, "learning_rate": 2.9531666666666667e-05, "loss": 0.0326, "step": 11408 }, { "epoch": 21.049861495844876, "grad_norm": 0.17546714842319489, "learning_rate": 2.9531333333333333e-05, "loss": 0.0078, "step": 11409 }, { "epoch": 21.051708217913205, "grad_norm": 0.22640059888362885, "learning_rate": 2.9531e-05, "loss": 0.0067, "step": 11410 }, { "epoch": 21.053554939981534, "grad_norm": 0.1389237493276596, "learning_rate": 2.953066666666667e-05, "loss": 0.004, "step": 11411 }, { "epoch": 21.05540166204986, "grad_norm": 0.15366314351558685, "learning_rate": 2.9530333333333334e-05, "loss": 0.0042, "step": 11412 }, { "epoch": 21.05724838411819, "grad_norm": 0.21289034187793732, "learning_rate": 2.953e-05, "loss": 0.0067, "step": 11413 }, { "epoch": 21.05909510618652, "grad_norm": 0.1926611363887787, "learning_rate": 2.9529666666666666e-05, "loss": 0.0062, "step": 11414 }, { "epoch": 21.060941828254848, "grad_norm": 0.2734687626361847, "learning_rate": 2.9529333333333335e-05, "loss": 0.0103, "step": 11415 }, { "epoch": 21.062788550323177, "grad_norm": 0.1464875340461731, "learning_rate": 2.9528999999999998e-05, "loss": 0.0036, "step": 11416 }, { "epoch": 21.064635272391506, "grad_norm": 0.2204812616109848, "learning_rate": 2.9528666666666667e-05, "loss": 0.0053, "step": 11417 }, { "epoch": 21.066481994459835, "grad_norm": 0.2089698612689972, "learning_rate": 2.9528333333333337e-05, "loss": 0.0042, "step": 11418 }, { "epoch": 21.068328716528164, "grad_norm": 0.26201918721199036, "learning_rate": 2.9528e-05, "loss": 0.0074, "step": 11419 }, { "epoch": 21.07017543859649, "grad_norm": 
0.41449442505836487, "learning_rate": 2.952766666666667e-05, "loss": 0.0113, "step": 11420 }, { "epoch": 21.07202216066482, "grad_norm": 0.3523827791213989, "learning_rate": 2.9527333333333334e-05, "loss": 0.0106, "step": 11421 }, { "epoch": 21.073868882733148, "grad_norm": 0.11771949380636215, "learning_rate": 2.9527e-05, "loss": 0.003, "step": 11422 }, { "epoch": 21.075715604801477, "grad_norm": 0.15286806225776672, "learning_rate": 2.9526666666666666e-05, "loss": 0.0043, "step": 11423 }, { "epoch": 21.077562326869806, "grad_norm": 0.28174591064453125, "learning_rate": 2.9526333333333336e-05, "loss": 0.006, "step": 11424 }, { "epoch": 21.079409048938135, "grad_norm": 0.2134329378604889, "learning_rate": 2.9525999999999998e-05, "loss": 0.0085, "step": 11425 }, { "epoch": 21.081255771006465, "grad_norm": 0.22803491353988647, "learning_rate": 2.9525666666666668e-05, "loss": 0.0091, "step": 11426 }, { "epoch": 21.083102493074794, "grad_norm": 0.3291338384151459, "learning_rate": 2.9525333333333337e-05, "loss": 0.0092, "step": 11427 }, { "epoch": 21.08494921514312, "grad_norm": 0.31323766708374023, "learning_rate": 2.9525e-05, "loss": 0.015, "step": 11428 }, { "epoch": 21.08679593721145, "grad_norm": 0.2662685811519623, "learning_rate": 2.952466666666667e-05, "loss": 0.0099, "step": 11429 }, { "epoch": 21.088642659279778, "grad_norm": 0.4421330392360687, "learning_rate": 2.9524333333333335e-05, "loss": 0.0069, "step": 11430 }, { "epoch": 21.090489381348107, "grad_norm": 0.2042483538389206, "learning_rate": 2.9524e-05, "loss": 0.0039, "step": 11431 }, { "epoch": 21.092336103416436, "grad_norm": 0.29195424914360046, "learning_rate": 2.9523666666666667e-05, "loss": 0.021, "step": 11432 }, { "epoch": 21.094182825484765, "grad_norm": 0.5284271240234375, "learning_rate": 2.9523333333333336e-05, "loss": 0.1348, "step": 11433 }, { "epoch": 21.096029547553094, "grad_norm": 0.6061310172080994, "learning_rate": 2.9523e-05, "loss": 0.1549, "step": 11434 }, { "epoch": 
21.097876269621423, "grad_norm": 0.7104319334030151, "learning_rate": 2.9522666666666668e-05, "loss": 0.1185, "step": 11435 }, { "epoch": 21.099722991689752, "grad_norm": 0.40811780095100403, "learning_rate": 2.9522333333333337e-05, "loss": 0.0631, "step": 11436 }, { "epoch": 21.101569713758078, "grad_norm": 0.48598840832710266, "learning_rate": 2.9522e-05, "loss": 0.1222, "step": 11437 }, { "epoch": 21.103416435826407, "grad_norm": 0.5141649842262268, "learning_rate": 2.952166666666667e-05, "loss": 0.0749, "step": 11438 }, { "epoch": 21.105263157894736, "grad_norm": 0.3259578049182892, "learning_rate": 2.9521333333333335e-05, "loss": 0.0342, "step": 11439 }, { "epoch": 21.107109879963065, "grad_norm": 0.4215473532676697, "learning_rate": 2.9521e-05, "loss": 0.042, "step": 11440 }, { "epoch": 21.108956602031395, "grad_norm": 1.668999433517456, "learning_rate": 2.9520666666666667e-05, "loss": 0.0775, "step": 11441 }, { "epoch": 21.110803324099724, "grad_norm": 0.3182021975517273, "learning_rate": 2.9520333333333333e-05, "loss": 0.0447, "step": 11442 }, { "epoch": 21.112650046168053, "grad_norm": 0.44492602348327637, "learning_rate": 2.9520000000000002e-05, "loss": 0.0343, "step": 11443 }, { "epoch": 21.114496768236382, "grad_norm": 0.32682621479034424, "learning_rate": 2.9519666666666668e-05, "loss": 0.0295, "step": 11444 }, { "epoch": 21.116343490304708, "grad_norm": 0.4603782892227173, "learning_rate": 2.9519333333333334e-05, "loss": 0.0817, "step": 11445 }, { "epoch": 21.118190212373037, "grad_norm": 0.24359837174415588, "learning_rate": 2.9519e-05, "loss": 0.0164, "step": 11446 }, { "epoch": 21.120036934441366, "grad_norm": 0.4243770241737366, "learning_rate": 2.951866666666667e-05, "loss": 0.0193, "step": 11447 }, { "epoch": 21.121883656509695, "grad_norm": 0.2748834788799286, "learning_rate": 2.951833333333333e-05, "loss": 0.011, "step": 11448 }, { "epoch": 21.123730378578024, "grad_norm": 0.45617279410362244, "learning_rate": 2.9518e-05, "loss": 0.0105, 
"step": 11449 }, { "epoch": 21.125577100646353, "grad_norm": 0.32819056510925293, "learning_rate": 2.9517666666666667e-05, "loss": 0.0462, "step": 11450 }, { "epoch": 21.127423822714682, "grad_norm": 0.1864812821149826, "learning_rate": 2.9517333333333333e-05, "loss": 0.0091, "step": 11451 }, { "epoch": 21.12927054478301, "grad_norm": 0.14667195081710815, "learning_rate": 2.9517000000000002e-05, "loss": 0.0064, "step": 11452 }, { "epoch": 21.131117266851337, "grad_norm": 0.3102090358734131, "learning_rate": 2.9516666666666668e-05, "loss": 0.0107, "step": 11453 }, { "epoch": 21.132963988919666, "grad_norm": 0.44857123494148254, "learning_rate": 2.9516333333333334e-05, "loss": 0.0055, "step": 11454 }, { "epoch": 21.134810710987995, "grad_norm": 0.34422293305397034, "learning_rate": 2.9516e-05, "loss": 0.01, "step": 11455 }, { "epoch": 21.136657433056325, "grad_norm": 0.14523011445999146, "learning_rate": 2.951566666666667e-05, "loss": 0.0063, "step": 11456 }, { "epoch": 21.138504155124654, "grad_norm": 0.26662322878837585, "learning_rate": 2.9515333333333332e-05, "loss": 0.0095, "step": 11457 }, { "epoch": 21.140350877192983, "grad_norm": 0.22227898240089417, "learning_rate": 2.9515e-05, "loss": 0.0104, "step": 11458 }, { "epoch": 21.142197599261312, "grad_norm": 0.1880018413066864, "learning_rate": 2.9514666666666667e-05, "loss": 0.0073, "step": 11459 }, { "epoch": 21.14404432132964, "grad_norm": 0.19501319527626038, "learning_rate": 2.9514333333333333e-05, "loss": 0.0049, "step": 11460 }, { "epoch": 21.14589104339797, "grad_norm": 0.13313603401184082, "learning_rate": 2.9514000000000002e-05, "loss": 0.0045, "step": 11461 }, { "epoch": 21.147737765466296, "grad_norm": 0.36689451336860657, "learning_rate": 2.9513666666666668e-05, "loss": 0.0086, "step": 11462 }, { "epoch": 21.149584487534625, "grad_norm": 0.3137074112892151, "learning_rate": 2.9513333333333334e-05, "loss": 0.0089, "step": 11463 }, { "epoch": 21.151431209602954, "grad_norm": 0.16122853755950928, 
"learning_rate": 2.9513e-05, "loss": 0.0048, "step": 11464 }, { "epoch": 21.153277931671283, "grad_norm": 0.1358654499053955, "learning_rate": 2.951266666666667e-05, "loss": 0.0025, "step": 11465 }, { "epoch": 21.155124653739612, "grad_norm": 0.33823901414871216, "learning_rate": 2.9512333333333332e-05, "loss": 0.0124, "step": 11466 }, { "epoch": 21.15697137580794, "grad_norm": 0.2347034364938736, "learning_rate": 2.9512e-05, "loss": 0.0055, "step": 11467 }, { "epoch": 21.15881809787627, "grad_norm": 0.3039151132106781, "learning_rate": 2.951166666666667e-05, "loss": 0.0078, "step": 11468 }, { "epoch": 21.1606648199446, "grad_norm": 0.12040606886148453, "learning_rate": 2.9511333333333333e-05, "loss": 0.0043, "step": 11469 }, { "epoch": 21.162511542012926, "grad_norm": 0.47133681178092957, "learning_rate": 2.9511000000000003e-05, "loss": 0.0048, "step": 11470 }, { "epoch": 21.164358264081255, "grad_norm": 0.39657193422317505, "learning_rate": 2.9510666666666665e-05, "loss": 0.0042, "step": 11471 }, { "epoch": 21.166204986149584, "grad_norm": 0.23207911849021912, "learning_rate": 2.9510333333333334e-05, "loss": 0.0082, "step": 11472 }, { "epoch": 21.168051708217913, "grad_norm": 0.7355784177780151, "learning_rate": 2.951e-05, "loss": 0.0089, "step": 11473 }, { "epoch": 21.169898430286242, "grad_norm": 0.1707821637392044, "learning_rate": 2.9509666666666666e-05, "loss": 0.005, "step": 11474 }, { "epoch": 21.17174515235457, "grad_norm": 0.21882984042167664, "learning_rate": 2.9509333333333332e-05, "loss": 0.008, "step": 11475 }, { "epoch": 21.1735918744229, "grad_norm": 0.4786779284477234, "learning_rate": 2.9509e-05, "loss": 0.0116, "step": 11476 }, { "epoch": 21.17543859649123, "grad_norm": 0.9925299882888794, "learning_rate": 2.9508666666666668e-05, "loss": 0.0112, "step": 11477 }, { "epoch": 21.177285318559555, "grad_norm": 0.28563570976257324, "learning_rate": 2.9508333333333333e-05, "loss": 0.0059, "step": 11478 }, { "epoch": 21.179132040627884, "grad_norm": 
0.19293378293514252, "learning_rate": 2.9508000000000003e-05, "loss": 0.0057, "step": 11479 }, { "epoch": 21.180978762696213, "grad_norm": 0.2053467184305191, "learning_rate": 2.9507666666666665e-05, "loss": 0.0053, "step": 11480 }, { "epoch": 21.182825484764543, "grad_norm": 0.44861987233161926, "learning_rate": 2.9507333333333335e-05, "loss": 0.0086, "step": 11481 }, { "epoch": 21.18467220683287, "grad_norm": 0.42980897426605225, "learning_rate": 2.9507e-05, "loss": 0.0099, "step": 11482 }, { "epoch": 21.1865189289012, "grad_norm": 0.6594889163970947, "learning_rate": 2.9506666666666667e-05, "loss": 0.1313, "step": 11483 }, { "epoch": 21.18836565096953, "grad_norm": 0.7568273544311523, "learning_rate": 2.9506333333333332e-05, "loss": 0.1434, "step": 11484 }, { "epoch": 21.19021237303786, "grad_norm": 0.681679368019104, "learning_rate": 2.9506000000000002e-05, "loss": 0.1567, "step": 11485 }, { "epoch": 21.19205909510619, "grad_norm": 0.45565226674079895, "learning_rate": 2.9505666666666668e-05, "loss": 0.0674, "step": 11486 }, { "epoch": 21.193905817174514, "grad_norm": 0.5891532897949219, "learning_rate": 2.9505333333333334e-05, "loss": 0.1301, "step": 11487 }, { "epoch": 21.195752539242843, "grad_norm": 0.5234256982803345, "learning_rate": 2.9505000000000003e-05, "loss": 0.0551, "step": 11488 }, { "epoch": 21.197599261311172, "grad_norm": 0.557822048664093, "learning_rate": 2.9504666666666666e-05, "loss": 0.089, "step": 11489 }, { "epoch": 21.1994459833795, "grad_norm": 0.3232654929161072, "learning_rate": 2.9504333333333335e-05, "loss": 0.0384, "step": 11490 }, { "epoch": 21.20129270544783, "grad_norm": 0.5436959266662598, "learning_rate": 2.9504e-05, "loss": 0.0568, "step": 11491 }, { "epoch": 21.20313942751616, "grad_norm": 0.3208896517753601, "learning_rate": 2.9503666666666667e-05, "loss": 0.0287, "step": 11492 }, { "epoch": 21.20498614958449, "grad_norm": 0.331940233707428, "learning_rate": 2.9503333333333336e-05, "loss": 0.0384, "step": 11493 }, { 
"epoch": 21.206832871652818, "grad_norm": 0.3176758587360382, "learning_rate": 2.9503000000000002e-05, "loss": 0.043, "step": 11494 }, { "epoch": 21.208679593721143, "grad_norm": 0.6546593308448792, "learning_rate": 2.9502666666666668e-05, "loss": 0.0594, "step": 11495 }, { "epoch": 21.210526315789473, "grad_norm": 0.2821478247642517, "learning_rate": 2.9502333333333334e-05, "loss": 0.0339, "step": 11496 }, { "epoch": 21.2123730378578, "grad_norm": 0.3861061632633209, "learning_rate": 2.9502000000000003e-05, "loss": 0.0249, "step": 11497 }, { "epoch": 21.21421975992613, "grad_norm": 0.27039122581481934, "learning_rate": 2.9501666666666666e-05, "loss": 0.0144, "step": 11498 }, { "epoch": 21.21606648199446, "grad_norm": 0.18123827874660492, "learning_rate": 2.9501333333333335e-05, "loss": 0.008, "step": 11499 }, { "epoch": 21.21791320406279, "grad_norm": 0.2717362642288208, "learning_rate": 2.9500999999999998e-05, "loss": 0.014, "step": 11500 }, { "epoch": 21.21975992613112, "grad_norm": 0.2388276308774948, "learning_rate": 2.9500666666666667e-05, "loss": 0.0132, "step": 11501 }, { "epoch": 21.221606648199447, "grad_norm": 0.22589102387428284, "learning_rate": 2.9500333333333336e-05, "loss": 0.0091, "step": 11502 }, { "epoch": 21.223453370267773, "grad_norm": 0.21260640025138855, "learning_rate": 2.95e-05, "loss": 0.0344, "step": 11503 }, { "epoch": 21.225300092336102, "grad_norm": 0.6328578591346741, "learning_rate": 2.9499666666666668e-05, "loss": 0.0065, "step": 11504 }, { "epoch": 21.22714681440443, "grad_norm": 0.2761143147945404, "learning_rate": 2.9499333333333334e-05, "loss": 0.0135, "step": 11505 }, { "epoch": 21.22899353647276, "grad_norm": 0.46611031889915466, "learning_rate": 2.9499e-05, "loss": 0.0186, "step": 11506 }, { "epoch": 21.23084025854109, "grad_norm": 0.8633270263671875, "learning_rate": 2.9498666666666666e-05, "loss": 0.0133, "step": 11507 }, { "epoch": 21.23268698060942, "grad_norm": 0.2508547604084015, "learning_rate": 
2.9498333333333335e-05, "loss": 0.0045, "step": 11508 }, { "epoch": 21.234533702677748, "grad_norm": 0.18956667184829712, "learning_rate": 2.9497999999999998e-05, "loss": 0.0054, "step": 11509 }, { "epoch": 21.236380424746077, "grad_norm": 0.2119181603193283, "learning_rate": 2.9497666666666667e-05, "loss": 0.0062, "step": 11510 }, { "epoch": 21.238227146814406, "grad_norm": 0.1728789210319519, "learning_rate": 2.9497333333333336e-05, "loss": 0.0068, "step": 11511 }, { "epoch": 21.24007386888273, "grad_norm": 0.5325166583061218, "learning_rate": 2.9497e-05, "loss": 0.0138, "step": 11512 }, { "epoch": 21.24192059095106, "grad_norm": 0.6290391087532043, "learning_rate": 2.9496666666666668e-05, "loss": 0.0138, "step": 11513 }, { "epoch": 21.24376731301939, "grad_norm": 0.16870111227035522, "learning_rate": 2.9496333333333334e-05, "loss": 0.008, "step": 11514 }, { "epoch": 21.24561403508772, "grad_norm": 0.2603950500488281, "learning_rate": 2.9496e-05, "loss": 0.0101, "step": 11515 }, { "epoch": 21.24746075715605, "grad_norm": 0.4347416162490845, "learning_rate": 2.9495666666666666e-05, "loss": 0.0159, "step": 11516 }, { "epoch": 21.249307479224377, "grad_norm": 0.21180957555770874, "learning_rate": 2.9495333333333335e-05, "loss": 0.0123, "step": 11517 }, { "epoch": 21.251154201292707, "grad_norm": 0.20680710673332214, "learning_rate": 2.9495e-05, "loss": 0.0074, "step": 11518 }, { "epoch": 21.253000923361036, "grad_norm": 0.23356780409812927, "learning_rate": 2.9494666666666667e-05, "loss": 0.0055, "step": 11519 }, { "epoch": 21.25484764542936, "grad_norm": 0.5864790678024292, "learning_rate": 2.9494333333333337e-05, "loss": 0.0047, "step": 11520 }, { "epoch": 21.25669436749769, "grad_norm": 0.38270673155784607, "learning_rate": 2.9494e-05, "loss": 0.0081, "step": 11521 }, { "epoch": 21.25854108956602, "grad_norm": 0.34858450293540955, "learning_rate": 2.949366666666667e-05, "loss": 0.0122, "step": 11522 }, { "epoch": 21.26038781163435, "grad_norm": 
0.1718364804983139, "learning_rate": 2.9493333333333334e-05, "loss": 0.0043, "step": 11523 }, { "epoch": 21.262234533702678, "grad_norm": 0.4161956310272217, "learning_rate": 2.9493e-05, "loss": 0.0064, "step": 11524 }, { "epoch": 21.264081255771007, "grad_norm": 0.1917291134595871, "learning_rate": 2.9492666666666666e-05, "loss": 0.0049, "step": 11525 }, { "epoch": 21.265927977839336, "grad_norm": 0.20704177021980286, "learning_rate": 2.9492333333333336e-05, "loss": 0.0056, "step": 11526 }, { "epoch": 21.267774699907665, "grad_norm": 0.14914242923259735, "learning_rate": 2.9492e-05, "loss": 0.0041, "step": 11527 }, { "epoch": 21.26962142197599, "grad_norm": 0.2752462327480316, "learning_rate": 2.9491666666666667e-05, "loss": 0.0056, "step": 11528 }, { "epoch": 21.27146814404432, "grad_norm": 0.16511335968971252, "learning_rate": 2.9491333333333337e-05, "loss": 0.0024, "step": 11529 }, { "epoch": 21.27331486611265, "grad_norm": 0.31737568974494934, "learning_rate": 2.9491e-05, "loss": 0.0085, "step": 11530 }, { "epoch": 21.27516158818098, "grad_norm": 0.2316114753484726, "learning_rate": 2.949066666666667e-05, "loss": 0.0044, "step": 11531 }, { "epoch": 21.277008310249307, "grad_norm": 0.3384943902492523, "learning_rate": 2.949033333333333e-05, "loss": 0.0185, "step": 11532 }, { "epoch": 21.278855032317637, "grad_norm": 0.6905454993247986, "learning_rate": 2.949e-05, "loss": 0.1296, "step": 11533 }, { "epoch": 21.280701754385966, "grad_norm": 0.6204212307929993, "learning_rate": 2.9489666666666666e-05, "loss": 0.1165, "step": 11534 }, { "epoch": 21.282548476454295, "grad_norm": 0.45431268215179443, "learning_rate": 2.9489333333333332e-05, "loss": 0.0895, "step": 11535 }, { "epoch": 21.284395198522624, "grad_norm": 0.5600978136062622, "learning_rate": 2.9489000000000002e-05, "loss": 0.106, "step": 11536 }, { "epoch": 21.28624192059095, "grad_norm": 0.5091732144355774, "learning_rate": 2.9488666666666668e-05, "loss": 0.0879, "step": 11537 }, { "epoch": 
21.28808864265928, "grad_norm": 0.5164522528648376, "learning_rate": 2.9488333333333334e-05, "loss": 0.0856, "step": 11538 }, { "epoch": 21.289935364727608, "grad_norm": 0.3764141798019409, "learning_rate": 2.9488e-05, "loss": 0.0866, "step": 11539 }, { "epoch": 21.291782086795937, "grad_norm": 0.3163754343986511, "learning_rate": 2.948766666666667e-05, "loss": 0.049, "step": 11540 }, { "epoch": 21.293628808864266, "grad_norm": 0.48345544934272766, "learning_rate": 2.948733333333333e-05, "loss": 0.0335, "step": 11541 }, { "epoch": 21.295475530932595, "grad_norm": 0.3480796217918396, "learning_rate": 2.9487e-05, "loss": 0.047, "step": 11542 }, { "epoch": 21.297322253000925, "grad_norm": 0.7356621623039246, "learning_rate": 2.948666666666667e-05, "loss": 0.0324, "step": 11543 }, { "epoch": 21.299168975069254, "grad_norm": 0.24600377678871155, "learning_rate": 2.9486333333333333e-05, "loss": 0.0218, "step": 11544 }, { "epoch": 21.30101569713758, "grad_norm": 0.242000550031662, "learning_rate": 2.9486000000000002e-05, "loss": 0.0216, "step": 11545 }, { "epoch": 21.30286241920591, "grad_norm": 0.31422117352485657, "learning_rate": 2.9485666666666668e-05, "loss": 0.0299, "step": 11546 }, { "epoch": 21.304709141274238, "grad_norm": 0.2681136727333069, "learning_rate": 2.9485333333333334e-05, "loss": 0.0118, "step": 11547 }, { "epoch": 21.306555863342567, "grad_norm": 0.46410465240478516, "learning_rate": 2.9485e-05, "loss": 0.0594, "step": 11548 }, { "epoch": 21.308402585410896, "grad_norm": 1.1594105958938599, "learning_rate": 2.948466666666667e-05, "loss": 0.0341, "step": 11549 }, { "epoch": 21.310249307479225, "grad_norm": 0.18428504467010498, "learning_rate": 2.948433333333333e-05, "loss": 0.0106, "step": 11550 }, { "epoch": 21.312096029547554, "grad_norm": 0.2524779140949249, "learning_rate": 2.9484e-05, "loss": 0.0124, "step": 11551 }, { "epoch": 21.313942751615883, "grad_norm": 0.4212198555469513, "learning_rate": 2.948366666666667e-05, "loss": 0.0152, "step": 
11552 }, { "epoch": 21.31578947368421, "grad_norm": 0.2710402011871338, "learning_rate": 2.9483333333333333e-05, "loss": 0.016, "step": 11553 }, { "epoch": 21.317636195752538, "grad_norm": 0.1798483282327652, "learning_rate": 2.9483000000000002e-05, "loss": 0.0068, "step": 11554 }, { "epoch": 21.319482917820867, "grad_norm": 0.3181666135787964, "learning_rate": 2.9482666666666668e-05, "loss": 0.0061, "step": 11555 }, { "epoch": 21.321329639889196, "grad_norm": 0.1473083347082138, "learning_rate": 2.9482333333333334e-05, "loss": 0.0082, "step": 11556 }, { "epoch": 21.323176361957525, "grad_norm": 0.302139550447464, "learning_rate": 2.9482e-05, "loss": 0.0076, "step": 11557 }, { "epoch": 21.325023084025855, "grad_norm": 0.21949274837970734, "learning_rate": 2.948166666666667e-05, "loss": 0.0084, "step": 11558 }, { "epoch": 21.326869806094184, "grad_norm": 0.1488589644432068, "learning_rate": 2.9481333333333332e-05, "loss": 0.0055, "step": 11559 }, { "epoch": 21.328716528162513, "grad_norm": 0.4395733177661896, "learning_rate": 2.9481e-05, "loss": 0.0079, "step": 11560 }, { "epoch": 21.330563250230842, "grad_norm": 0.38077491521835327, "learning_rate": 2.9480666666666667e-05, "loss": 0.009, "step": 11561 }, { "epoch": 21.332409972299168, "grad_norm": 0.17946629226207733, "learning_rate": 2.9480333333333333e-05, "loss": 0.0046, "step": 11562 }, { "epoch": 21.334256694367497, "grad_norm": 0.25635477900505066, "learning_rate": 2.9480000000000002e-05, "loss": 0.0087, "step": 11563 }, { "epoch": 21.336103416435826, "grad_norm": 0.18848296999931335, "learning_rate": 2.9479666666666665e-05, "loss": 0.0073, "step": 11564 }, { "epoch": 21.337950138504155, "grad_norm": 0.3364658057689667, "learning_rate": 2.9479333333333334e-05, "loss": 0.0089, "step": 11565 }, { "epoch": 21.339796860572484, "grad_norm": 0.16113604605197906, "learning_rate": 2.9479e-05, "loss": 0.0046, "step": 11566 }, { "epoch": 21.341643582640813, "grad_norm": 0.42057058215141296, "learning_rate": 
2.9478666666666666e-05, "loss": 0.012, "step": 11567 }, { "epoch": 21.343490304709142, "grad_norm": 0.27273300290107727, "learning_rate": 2.9478333333333335e-05, "loss": 0.0057, "step": 11568 }, { "epoch": 21.34533702677747, "grad_norm": 0.21287663280963898, "learning_rate": 2.9478e-05, "loss": 0.0045, "step": 11569 }, { "epoch": 21.347183748845797, "grad_norm": 0.3194565176963806, "learning_rate": 2.9477666666666667e-05, "loss": 0.0107, "step": 11570 }, { "epoch": 21.349030470914126, "grad_norm": 0.3554210662841797, "learning_rate": 2.9477333333333333e-05, "loss": 0.0066, "step": 11571 }, { "epoch": 21.350877192982455, "grad_norm": 0.3108913004398346, "learning_rate": 2.9477000000000003e-05, "loss": 0.008, "step": 11572 }, { "epoch": 21.352723915050785, "grad_norm": 0.1508089303970337, "learning_rate": 2.9476666666666665e-05, "loss": 0.0059, "step": 11573 }, { "epoch": 21.354570637119114, "grad_norm": 0.41390153765678406, "learning_rate": 2.9476333333333334e-05, "loss": 0.0156, "step": 11574 }, { "epoch": 21.356417359187443, "grad_norm": 0.20888638496398926, "learning_rate": 2.9476e-05, "loss": 0.0032, "step": 11575 }, { "epoch": 21.358264081255772, "grad_norm": 0.29900670051574707, "learning_rate": 2.9475666666666666e-05, "loss": 0.0112, "step": 11576 }, { "epoch": 21.3601108033241, "grad_norm": 0.18397219479084015, "learning_rate": 2.9475333333333336e-05, "loss": 0.007, "step": 11577 }, { "epoch": 21.361957525392427, "grad_norm": 0.2705393135547638, "learning_rate": 2.9475e-05, "loss": 0.0085, "step": 11578 }, { "epoch": 21.363804247460756, "grad_norm": 0.6648091673851013, "learning_rate": 2.9474666666666667e-05, "loss": 0.0213, "step": 11579 }, { "epoch": 21.365650969529085, "grad_norm": 0.5839139819145203, "learning_rate": 2.9474333333333333e-05, "loss": 0.0153, "step": 11580 }, { "epoch": 21.367497691597414, "grad_norm": 0.2984607219696045, "learning_rate": 2.9474000000000003e-05, "loss": 0.0066, "step": 11581 }, { "epoch": 21.369344413665743, "grad_norm": 
0.2725304365158081, "learning_rate": 2.9473666666666665e-05, "loss": 0.005, "step": 11582 }, { "epoch": 21.371191135734072, "grad_norm": 0.4688223600387573, "learning_rate": 2.9473333333333335e-05, "loss": 0.1274, "step": 11583 }, { "epoch": 21.3730378578024, "grad_norm": 0.5752362608909607, "learning_rate": 2.9473e-05, "loss": 0.1327, "step": 11584 }, { "epoch": 21.37488457987073, "grad_norm": 0.45737379789352417, "learning_rate": 2.9472666666666666e-05, "loss": 0.1189, "step": 11585 }, { "epoch": 21.37673130193906, "grad_norm": 0.3827018141746521, "learning_rate": 2.9472333333333336e-05, "loss": 0.0666, "step": 11586 }, { "epoch": 21.378578024007385, "grad_norm": 0.5348204970359802, "learning_rate": 2.9472000000000002e-05, "loss": 0.102, "step": 11587 }, { "epoch": 21.380424746075715, "grad_norm": 0.5674592852592468, "learning_rate": 2.9471666666666668e-05, "loss": 0.0716, "step": 11588 }, { "epoch": 21.382271468144044, "grad_norm": 0.3822150230407715, "learning_rate": 2.9471333333333334e-05, "loss": 0.0597, "step": 11589 }, { "epoch": 21.384118190212373, "grad_norm": 0.37607574462890625, "learning_rate": 2.9471000000000003e-05, "loss": 0.0414, "step": 11590 }, { "epoch": 21.385964912280702, "grad_norm": 0.36966872215270996, "learning_rate": 2.9470666666666665e-05, "loss": 0.0591, "step": 11591 }, { "epoch": 21.38781163434903, "grad_norm": 0.6840993762016296, "learning_rate": 2.9470333333333335e-05, "loss": 0.0338, "step": 11592 }, { "epoch": 21.38965835641736, "grad_norm": 0.3434557020664215, "learning_rate": 2.947e-05, "loss": 0.0374, "step": 11593 }, { "epoch": 21.39150507848569, "grad_norm": 0.3484346568584442, "learning_rate": 2.9469666666666667e-05, "loss": 0.0627, "step": 11594 }, { "epoch": 21.393351800554015, "grad_norm": 0.24304430186748505, "learning_rate": 2.9469333333333336e-05, "loss": 0.0188, "step": 11595 }, { "epoch": 21.395198522622344, "grad_norm": 0.2664783000946045, "learning_rate": 2.9469e-05, "loss": 0.0175, "step": 11596 }, { "epoch": 
21.397045244690673, "grad_norm": 0.23857802152633667, "learning_rate": 2.9468666666666668e-05, "loss": 0.0339, "step": 11597 }, { "epoch": 21.398891966759003, "grad_norm": 0.2604442238807678, "learning_rate": 2.9468333333333334e-05, "loss": 0.0162, "step": 11598 }, { "epoch": 21.40073868882733, "grad_norm": 0.28916192054748535, "learning_rate": 2.9468e-05, "loss": 0.0106, "step": 11599 }, { "epoch": 21.40258541089566, "grad_norm": 0.1989450752735138, "learning_rate": 2.9467666666666666e-05, "loss": 0.0076, "step": 11600 }, { "epoch": 21.40443213296399, "grad_norm": 0.1816091239452362, "learning_rate": 2.9467333333333335e-05, "loss": 0.0064, "step": 11601 }, { "epoch": 21.40627885503232, "grad_norm": 0.3587622046470642, "learning_rate": 2.9467e-05, "loss": 0.0159, "step": 11602 }, { "epoch": 21.408125577100645, "grad_norm": 0.3580322265625, "learning_rate": 2.9466666666666667e-05, "loss": 0.009, "step": 11603 }, { "epoch": 21.409972299168974, "grad_norm": 0.2690403163433075, "learning_rate": 2.9466333333333336e-05, "loss": 0.0068, "step": 11604 }, { "epoch": 21.411819021237303, "grad_norm": 0.20682458579540253, "learning_rate": 2.9466e-05, "loss": 0.01, "step": 11605 }, { "epoch": 21.413665743305632, "grad_norm": 0.32515549659729004, "learning_rate": 2.9465666666666668e-05, "loss": 0.0132, "step": 11606 }, { "epoch": 21.41551246537396, "grad_norm": 0.2933991253376007, "learning_rate": 2.9465333333333334e-05, "loss": 0.01, "step": 11607 }, { "epoch": 21.41735918744229, "grad_norm": 0.3860311508178711, "learning_rate": 2.9465e-05, "loss": 0.006, "step": 11608 }, { "epoch": 21.41920590951062, "grad_norm": 0.40069878101348877, "learning_rate": 2.9464666666666666e-05, "loss": 0.0135, "step": 11609 }, { "epoch": 21.42105263157895, "grad_norm": 0.23569025099277496, "learning_rate": 2.9464333333333335e-05, "loss": 0.0064, "step": 11610 }, { "epoch": 21.422899353647278, "grad_norm": 0.33610761165618896, "learning_rate": 2.9464e-05, "loss": 0.0145, "step": 11611 }, { "epoch": 
21.424746075715603, "grad_norm": 0.645300030708313, "learning_rate": 2.9463666666666667e-05, "loss": 0.018, "step": 11612 }, { "epoch": 21.426592797783933, "grad_norm": 0.5435053706169128, "learning_rate": 2.9463333333333336e-05, "loss": 0.0312, "step": 11613 }, { "epoch": 21.42843951985226, "grad_norm": 0.24929502606391907, "learning_rate": 2.9463e-05, "loss": 0.0065, "step": 11614 }, { "epoch": 21.43028624192059, "grad_norm": 0.3996683657169342, "learning_rate": 2.9462666666666668e-05, "loss": 0.0093, "step": 11615 }, { "epoch": 21.43213296398892, "grad_norm": 0.21856895089149475, "learning_rate": 2.9462333333333334e-05, "loss": 0.0103, "step": 11616 }, { "epoch": 21.43397968605725, "grad_norm": 0.386295348405838, "learning_rate": 2.9462e-05, "loss": 0.0129, "step": 11617 }, { "epoch": 21.43582640812558, "grad_norm": 0.14483925700187683, "learning_rate": 2.946166666666667e-05, "loss": 0.0049, "step": 11618 }, { "epoch": 21.437673130193907, "grad_norm": 0.14828045666217804, "learning_rate": 2.9461333333333335e-05, "loss": 0.0048, "step": 11619 }, { "epoch": 21.439519852262233, "grad_norm": 0.4934016764163971, "learning_rate": 2.9461e-05, "loss": 0.0209, "step": 11620 }, { "epoch": 21.441366574330562, "grad_norm": 0.35568687319755554, "learning_rate": 2.9460666666666667e-05, "loss": 0.0139, "step": 11621 }, { "epoch": 21.44321329639889, "grad_norm": 0.29019680619239807, "learning_rate": 2.9460333333333333e-05, "loss": 0.0047, "step": 11622 }, { "epoch": 21.44506001846722, "grad_norm": 0.3380444347858429, "learning_rate": 2.946e-05, "loss": 0.014, "step": 11623 }, { "epoch": 21.44690674053555, "grad_norm": 0.17599837481975555, "learning_rate": 2.945966666666667e-05, "loss": 0.0031, "step": 11624 }, { "epoch": 21.44875346260388, "grad_norm": 0.46131882071495056, "learning_rate": 2.945933333333333e-05, "loss": 0.0119, "step": 11625 }, { "epoch": 21.450600184672208, "grad_norm": 0.33762016892433167, "learning_rate": 2.9459e-05, "loss": 0.0108, "step": 11626 }, { 
"epoch": 21.452446906740537, "grad_norm": 0.2296239137649536, "learning_rate": 2.945866666666667e-05, "loss": 0.0047, "step": 11627 }, { "epoch": 21.454293628808863, "grad_norm": 0.09554245322942734, "learning_rate": 2.9458333333333332e-05, "loss": 0.0034, "step": 11628 }, { "epoch": 21.45614035087719, "grad_norm": 0.30348119139671326, "learning_rate": 2.9458e-05, "loss": 0.0156, "step": 11629 }, { "epoch": 21.45798707294552, "grad_norm": 0.5296110510826111, "learning_rate": 2.9457666666666667e-05, "loss": 0.011, "step": 11630 }, { "epoch": 21.45983379501385, "grad_norm": 0.33812788128852844, "learning_rate": 2.9457333333333333e-05, "loss": 0.0112, "step": 11631 }, { "epoch": 21.46168051708218, "grad_norm": 0.17619480192661285, "learning_rate": 2.9457e-05, "loss": 0.0038, "step": 11632 }, { "epoch": 21.46352723915051, "grad_norm": 0.5979994535446167, "learning_rate": 2.945666666666667e-05, "loss": 0.1385, "step": 11633 }, { "epoch": 21.465373961218837, "grad_norm": 0.5296694040298462, "learning_rate": 2.945633333333333e-05, "loss": 0.1222, "step": 11634 }, { "epoch": 21.467220683287167, "grad_norm": 0.8114538192749023, "learning_rate": 2.9456e-05, "loss": 0.0708, "step": 11635 }, { "epoch": 21.469067405355496, "grad_norm": 0.46399155259132385, "learning_rate": 2.945566666666667e-05, "loss": 0.069, "step": 11636 }, { "epoch": 21.47091412742382, "grad_norm": 0.6047453284263611, "learning_rate": 2.9455333333333332e-05, "loss": 0.075, "step": 11637 }, { "epoch": 21.47276084949215, "grad_norm": 0.47024619579315186, "learning_rate": 2.9455000000000002e-05, "loss": 0.0392, "step": 11638 }, { "epoch": 21.47460757156048, "grad_norm": 0.36386826634407043, "learning_rate": 2.9454666666666668e-05, "loss": 0.0485, "step": 11639 }, { "epoch": 21.47645429362881, "grad_norm": 0.4516194462776184, "learning_rate": 2.9454333333333334e-05, "loss": 0.0745, "step": 11640 }, { "epoch": 21.478301015697138, "grad_norm": 0.6910102367401123, "learning_rate": 2.9454e-05, "loss": 0.0718, 
"step": 11641 }, { "epoch": 21.480147737765467, "grad_norm": 0.2954343259334564, "learning_rate": 2.945366666666667e-05, "loss": 0.0422, "step": 11642 }, { "epoch": 21.481994459833796, "grad_norm": 0.44146180152893066, "learning_rate": 2.9453333333333335e-05, "loss": 0.0371, "step": 11643 }, { "epoch": 21.483841181902125, "grad_norm": 0.46557775139808655, "learning_rate": 2.9453e-05, "loss": 0.0684, "step": 11644 }, { "epoch": 21.48568790397045, "grad_norm": 0.3252936005592346, "learning_rate": 2.945266666666667e-05, "loss": 0.0207, "step": 11645 }, { "epoch": 21.48753462603878, "grad_norm": 0.47686776518821716, "learning_rate": 2.9452333333333333e-05, "loss": 0.0228, "step": 11646 }, { "epoch": 21.48938134810711, "grad_norm": 0.26284104585647583, "learning_rate": 2.9452000000000002e-05, "loss": 0.0237, "step": 11647 }, { "epoch": 21.49122807017544, "grad_norm": 0.20137442648410797, "learning_rate": 2.9451666666666668e-05, "loss": 0.0117, "step": 11648 }, { "epoch": 21.493074792243767, "grad_norm": 0.23116926848888397, "learning_rate": 2.9451333333333334e-05, "loss": 0.0108, "step": 11649 }, { "epoch": 21.494921514312097, "grad_norm": 0.1661728322505951, "learning_rate": 2.9451e-05, "loss": 0.0075, "step": 11650 }, { "epoch": 21.496768236380426, "grad_norm": 0.25384411215782166, "learning_rate": 2.945066666666667e-05, "loss": 0.0082, "step": 11651 }, { "epoch": 21.498614958448755, "grad_norm": 0.16077283024787903, "learning_rate": 2.9450333333333335e-05, "loss": 0.0109, "step": 11652 }, { "epoch": 21.50046168051708, "grad_norm": 0.31073248386383057, "learning_rate": 2.945e-05, "loss": 0.0081, "step": 11653 }, { "epoch": 21.50230840258541, "grad_norm": 0.32054486870765686, "learning_rate": 2.9449666666666667e-05, "loss": 0.007, "step": 11654 }, { "epoch": 21.50415512465374, "grad_norm": 0.23431295156478882, "learning_rate": 2.9449333333333333e-05, "loss": 0.0133, "step": 11655 }, { "epoch": 21.506001846722068, "grad_norm": 0.26101458072662354, "learning_rate": 
2.9449000000000002e-05, "loss": 0.0107, "step": 11656 }, { "epoch": 21.507848568790397, "grad_norm": 0.22934867441654205, "learning_rate": 2.9448666666666665e-05, "loss": 0.0088, "step": 11657 }, { "epoch": 21.509695290858726, "grad_norm": 0.31780898571014404, "learning_rate": 2.9448333333333334e-05, "loss": 0.0093, "step": 11658 }, { "epoch": 21.511542012927055, "grad_norm": 0.3254026174545288, "learning_rate": 2.9448e-05, "loss": 0.0087, "step": 11659 }, { "epoch": 21.513388734995385, "grad_norm": 0.11738292127847672, "learning_rate": 2.9447666666666666e-05, "loss": 0.0049, "step": 11660 }, { "epoch": 21.51523545706371, "grad_norm": 0.3099237382411957, "learning_rate": 2.9447333333333335e-05, "loss": 0.0099, "step": 11661 }, { "epoch": 21.51708217913204, "grad_norm": 0.09953440725803375, "learning_rate": 2.9447e-05, "loss": 0.0033, "step": 11662 }, { "epoch": 21.51892890120037, "grad_norm": 0.6011154651641846, "learning_rate": 2.9446666666666667e-05, "loss": 0.0159, "step": 11663 }, { "epoch": 21.520775623268698, "grad_norm": 0.298566609621048, "learning_rate": 2.9446333333333333e-05, "loss": 0.0161, "step": 11664 }, { "epoch": 21.522622345337027, "grad_norm": 0.36138394474983215, "learning_rate": 2.9446000000000002e-05, "loss": 0.0054, "step": 11665 }, { "epoch": 21.524469067405356, "grad_norm": 0.26504313945770264, "learning_rate": 2.9445666666666665e-05, "loss": 0.0073, "step": 11666 }, { "epoch": 21.526315789473685, "grad_norm": 0.15179431438446045, "learning_rate": 2.9445333333333334e-05, "loss": 0.0036, "step": 11667 }, { "epoch": 21.528162511542014, "grad_norm": 0.34152984619140625, "learning_rate": 2.9445000000000004e-05, "loss": 0.006, "step": 11668 }, { "epoch": 21.530009233610343, "grad_norm": 0.1816384345293045, "learning_rate": 2.9444666666666666e-05, "loss": 0.0035, "step": 11669 }, { "epoch": 21.53185595567867, "grad_norm": 0.4318518340587616, "learning_rate": 2.9444333333333335e-05, "loss": 0.0088, "step": 11670 }, { "epoch": 21.533702677746998, 
"grad_norm": 0.2734701931476593, "learning_rate": 2.9444e-05, "loss": 0.0063, "step": 11671 }, { "epoch": 21.535549399815327, "grad_norm": 0.1769401878118515, "learning_rate": 2.9443666666666667e-05, "loss": 0.0046, "step": 11672 }, { "epoch": 21.537396121883656, "grad_norm": 0.3240947127342224, "learning_rate": 2.9443333333333333e-05, "loss": 0.0066, "step": 11673 }, { "epoch": 21.539242843951985, "grad_norm": 0.40373358130455017, "learning_rate": 2.9443000000000003e-05, "loss": 0.013, "step": 11674 }, { "epoch": 21.541089566020315, "grad_norm": 0.41496601700782776, "learning_rate": 2.9442666666666665e-05, "loss": 0.0065, "step": 11675 }, { "epoch": 21.542936288088644, "grad_norm": 0.32355672121047974, "learning_rate": 2.9442333333333334e-05, "loss": 0.0101, "step": 11676 }, { "epoch": 21.544783010156973, "grad_norm": 0.26241594552993774, "learning_rate": 2.9442000000000004e-05, "loss": 0.0078, "step": 11677 }, { "epoch": 21.5466297322253, "grad_norm": 0.10817354172468185, "learning_rate": 2.9441666666666666e-05, "loss": 0.0037, "step": 11678 }, { "epoch": 21.548476454293628, "grad_norm": 0.05409083515405655, "learning_rate": 2.9441333333333336e-05, "loss": 0.0009, "step": 11679 }, { "epoch": 21.550323176361957, "grad_norm": 0.25125715136528015, "learning_rate": 2.9441e-05, "loss": 0.003, "step": 11680 }, { "epoch": 21.552169898430286, "grad_norm": 0.7134154438972473, "learning_rate": 2.9440666666666667e-05, "loss": 0.0158, "step": 11681 }, { "epoch": 21.554016620498615, "grad_norm": 0.40545278787612915, "learning_rate": 2.9440333333333333e-05, "loss": 0.0083, "step": 11682 }, { "epoch": 21.555863342566944, "grad_norm": 0.6952413320541382, "learning_rate": 2.944e-05, "loss": 0.2054, "step": 11683 }, { "epoch": 21.557710064635273, "grad_norm": 0.5398013591766357, "learning_rate": 2.9439666666666665e-05, "loss": 0.1455, "step": 11684 }, { "epoch": 21.559556786703602, "grad_norm": 0.4659029543399811, "learning_rate": 2.9439333333333335e-05, "loss": 0.0803, "step": 
11685 }, { "epoch": 21.56140350877193, "grad_norm": 0.48628848791122437, "learning_rate": 2.9439e-05, "loss": 0.1113, "step": 11686 }, { "epoch": 21.563250230840257, "grad_norm": 0.6104269623756409, "learning_rate": 2.9438666666666666e-05, "loss": 0.1125, "step": 11687 }, { "epoch": 21.565096952908586, "grad_norm": 0.5309699177742004, "learning_rate": 2.9438333333333336e-05, "loss": 0.0874, "step": 11688 }, { "epoch": 21.566943674976915, "grad_norm": 0.4424571394920349, "learning_rate": 2.9438e-05, "loss": 0.0724, "step": 11689 }, { "epoch": 21.568790397045245, "grad_norm": 0.4594380259513855, "learning_rate": 2.9437666666666668e-05, "loss": 0.0628, "step": 11690 }, { "epoch": 21.570637119113574, "grad_norm": 0.7726163864135742, "learning_rate": 2.9437333333333334e-05, "loss": 0.0555, "step": 11691 }, { "epoch": 21.572483841181903, "grad_norm": 0.3133237361907959, "learning_rate": 2.9437e-05, "loss": 0.0476, "step": 11692 }, { "epoch": 21.574330563250232, "grad_norm": 0.39666256308555603, "learning_rate": 2.943666666666667e-05, "loss": 0.0348, "step": 11693 }, { "epoch": 21.57617728531856, "grad_norm": 0.3721177577972412, "learning_rate": 2.9436333333333335e-05, "loss": 0.0317, "step": 11694 }, { "epoch": 21.578024007386887, "grad_norm": 0.23958668112754822, "learning_rate": 2.9436e-05, "loss": 0.0363, "step": 11695 }, { "epoch": 21.579870729455216, "grad_norm": 0.4093874990940094, "learning_rate": 2.9435666666666667e-05, "loss": 0.0421, "step": 11696 }, { "epoch": 21.581717451523545, "grad_norm": 0.24053242802619934, "learning_rate": 2.9435333333333336e-05, "loss": 0.0342, "step": 11697 }, { "epoch": 21.583564173591874, "grad_norm": 0.3315214216709137, "learning_rate": 2.9435e-05, "loss": 0.0284, "step": 11698 }, { "epoch": 21.585410895660203, "grad_norm": 0.18025685846805573, "learning_rate": 2.9434666666666668e-05, "loss": 0.0115, "step": 11699 }, { "epoch": 21.587257617728532, "grad_norm": 0.37826457619667053, "learning_rate": 2.9434333333333334e-05, "loss": 
0.0154, "step": 11700 }, { "epoch": 21.58910433979686, "grad_norm": 0.5563769340515137, "learning_rate": 2.9434e-05, "loss": 0.0287, "step": 11701 }, { "epoch": 21.59095106186519, "grad_norm": 0.3572843670845032, "learning_rate": 2.943366666666667e-05, "loss": 0.0363, "step": 11702 }, { "epoch": 21.592797783933516, "grad_norm": 0.2566000521183014, "learning_rate": 2.9433333333333335e-05, "loss": 0.0102, "step": 11703 }, { "epoch": 21.594644506001845, "grad_norm": 0.16738413274288177, "learning_rate": 2.9433e-05, "loss": 0.0049, "step": 11704 }, { "epoch": 21.596491228070175, "grad_norm": 0.31159695982933044, "learning_rate": 2.9432666666666667e-05, "loss": 0.0089, "step": 11705 }, { "epoch": 21.598337950138504, "grad_norm": 0.17356759309768677, "learning_rate": 2.9432333333333336e-05, "loss": 0.0072, "step": 11706 }, { "epoch": 21.600184672206833, "grad_norm": 0.2614094614982605, "learning_rate": 2.9432e-05, "loss": 0.0084, "step": 11707 }, { "epoch": 21.602031394275162, "grad_norm": 0.6964510083198547, "learning_rate": 2.9431666666666668e-05, "loss": 0.0116, "step": 11708 }, { "epoch": 21.60387811634349, "grad_norm": 0.30797988176345825, "learning_rate": 2.9431333333333334e-05, "loss": 0.0083, "step": 11709 }, { "epoch": 21.60572483841182, "grad_norm": 0.23163697123527527, "learning_rate": 2.9431e-05, "loss": 0.009, "step": 11710 }, { "epoch": 21.607571560480146, "grad_norm": 0.4534711241722107, "learning_rate": 2.943066666666667e-05, "loss": 0.0165, "step": 11711 }, { "epoch": 21.609418282548475, "grad_norm": 0.29898273944854736, "learning_rate": 2.9430333333333332e-05, "loss": 0.0096, "step": 11712 }, { "epoch": 21.611265004616804, "grad_norm": 0.3022543787956238, "learning_rate": 2.943e-05, "loss": 0.0371, "step": 11713 }, { "epoch": 21.613111726685133, "grad_norm": 0.241398423910141, "learning_rate": 2.9429666666666667e-05, "loss": 0.0057, "step": 11714 }, { "epoch": 21.614958448753463, "grad_norm": 0.15573015809059143, "learning_rate": 2.9429333333333333e-05, 
"loss": 0.0061, "step": 11715 }, { "epoch": 21.61680517082179, "grad_norm": 0.15038542449474335, "learning_rate": 2.9429e-05, "loss": 0.0046, "step": 11716 }, { "epoch": 21.61865189289012, "grad_norm": 0.3136591911315918, "learning_rate": 2.9428666666666668e-05, "loss": 0.0084, "step": 11717 }, { "epoch": 21.62049861495845, "grad_norm": 0.14817532896995544, "learning_rate": 2.9428333333333334e-05, "loss": 0.0038, "step": 11718 }, { "epoch": 21.62234533702678, "grad_norm": 0.1666664481163025, "learning_rate": 2.9428e-05, "loss": 0.0081, "step": 11719 }, { "epoch": 21.624192059095105, "grad_norm": 0.10841329395771027, "learning_rate": 2.942766666666667e-05, "loss": 0.0065, "step": 11720 }, { "epoch": 21.626038781163434, "grad_norm": 0.28235870599746704, "learning_rate": 2.9427333333333332e-05, "loss": 0.0103, "step": 11721 }, { "epoch": 21.627885503231763, "grad_norm": 0.12024784833192825, "learning_rate": 2.9427e-05, "loss": 0.0035, "step": 11722 }, { "epoch": 21.629732225300092, "grad_norm": 0.4908735752105713, "learning_rate": 2.9426666666666667e-05, "loss": 0.0067, "step": 11723 }, { "epoch": 21.63157894736842, "grad_norm": 0.21267184615135193, "learning_rate": 2.9426333333333333e-05, "loss": 0.0056, "step": 11724 }, { "epoch": 21.63342566943675, "grad_norm": 0.42651426792144775, "learning_rate": 2.9426e-05, "loss": 0.0078, "step": 11725 }, { "epoch": 21.63527239150508, "grad_norm": 0.16636477410793304, "learning_rate": 2.942566666666667e-05, "loss": 0.008, "step": 11726 }, { "epoch": 21.63711911357341, "grad_norm": 0.26599639654159546, "learning_rate": 2.9425333333333334e-05, "loss": 0.0101, "step": 11727 }, { "epoch": 21.638965835641734, "grad_norm": 0.2585376799106598, "learning_rate": 2.9425e-05, "loss": 0.0079, "step": 11728 }, { "epoch": 21.640812557710063, "grad_norm": 0.5444880723953247, "learning_rate": 2.942466666666667e-05, "loss": 0.0138, "step": 11729 }, { "epoch": 21.642659279778393, "grad_norm": 0.9459370970726013, "learning_rate": 
2.9424333333333332e-05, "loss": 0.0199, "step": 11730 }, { "epoch": 21.64450600184672, "grad_norm": 0.5683426260948181, "learning_rate": 2.9424e-05, "loss": 0.0075, "step": 11731 }, { "epoch": 21.64635272391505, "grad_norm": 0.42870140075683594, "learning_rate": 2.9423666666666667e-05, "loss": 0.0194, "step": 11732 }, { "epoch": 21.64819944598338, "grad_norm": 0.48324406147003174, "learning_rate": 2.9423333333333333e-05, "loss": 0.1129, "step": 11733 }, { "epoch": 21.65004616805171, "grad_norm": 0.5933670997619629, "learning_rate": 2.9423e-05, "loss": 0.0805, "step": 11734 }, { "epoch": 21.65189289012004, "grad_norm": 0.9176157116889954, "learning_rate": 2.942266666666667e-05, "loss": 0.0999, "step": 11735 }, { "epoch": 21.653739612188367, "grad_norm": 0.5372908115386963, "learning_rate": 2.9422333333333335e-05, "loss": 0.1085, "step": 11736 }, { "epoch": 21.655586334256693, "grad_norm": 0.9991260766983032, "learning_rate": 2.9422e-05, "loss": 0.1169, "step": 11737 }, { "epoch": 21.657433056325022, "grad_norm": 0.4064606726169586, "learning_rate": 2.942166666666667e-05, "loss": 0.0742, "step": 11738 }, { "epoch": 21.65927977839335, "grad_norm": 0.406076580286026, "learning_rate": 2.9421333333333332e-05, "loss": 0.0398, "step": 11739 }, { "epoch": 21.66112650046168, "grad_norm": 0.976673424243927, "learning_rate": 2.9421000000000002e-05, "loss": 0.0725, "step": 11740 }, { "epoch": 21.66297322253001, "grad_norm": 0.4736194908618927, "learning_rate": 2.9420666666666668e-05, "loss": 0.0688, "step": 11741 }, { "epoch": 21.66481994459834, "grad_norm": 0.374492347240448, "learning_rate": 2.9420333333333334e-05, "loss": 0.0372, "step": 11742 }, { "epoch": 21.666666666666668, "grad_norm": 0.31480005383491516, "learning_rate": 2.9420000000000003e-05, "loss": 0.0285, "step": 11743 }, { "epoch": 21.668513388734997, "grad_norm": 0.40599334239959717, "learning_rate": 2.9419666666666665e-05, "loss": 0.0283, "step": 11744 }, { "epoch": 21.670360110803323, "grad_norm": 
0.594463050365448, "learning_rate": 2.9419333333333335e-05, "loss": 0.0769, "step": 11745 }, { "epoch": 21.67220683287165, "grad_norm": 0.35580018162727356, "learning_rate": 2.9419e-05, "loss": 0.0331, "step": 11746 }, { "epoch": 21.67405355493998, "grad_norm": 0.43213754892349243, "learning_rate": 2.9418666666666667e-05, "loss": 0.013, "step": 11747 }, { "epoch": 21.67590027700831, "grad_norm": 0.5789579749107361, "learning_rate": 2.9418333333333333e-05, "loss": 0.0095, "step": 11748 }, { "epoch": 21.67774699907664, "grad_norm": 0.26206323504447937, "learning_rate": 2.9418000000000002e-05, "loss": 0.0112, "step": 11749 }, { "epoch": 21.67959372114497, "grad_norm": 0.30900248885154724, "learning_rate": 2.9417666666666664e-05, "loss": 0.0095, "step": 11750 }, { "epoch": 21.681440443213297, "grad_norm": 0.1787906438112259, "learning_rate": 2.9417333333333334e-05, "loss": 0.0087, "step": 11751 }, { "epoch": 21.683287165281627, "grad_norm": 0.15706923604011536, "learning_rate": 2.9417000000000003e-05, "loss": 0.0085, "step": 11752 }, { "epoch": 21.685133887349952, "grad_norm": 0.160160630941391, "learning_rate": 2.9416666666666666e-05, "loss": 0.0179, "step": 11753 }, { "epoch": 21.68698060941828, "grad_norm": 0.3976205587387085, "learning_rate": 2.9416333333333335e-05, "loss": 0.0166, "step": 11754 }, { "epoch": 21.68882733148661, "grad_norm": 0.45126911997795105, "learning_rate": 2.9416e-05, "loss": 0.0144, "step": 11755 }, { "epoch": 21.69067405355494, "grad_norm": 0.24339599907398224, "learning_rate": 2.9415666666666667e-05, "loss": 0.0042, "step": 11756 }, { "epoch": 21.69252077562327, "grad_norm": 0.32987111806869507, "learning_rate": 2.9415333333333333e-05, "loss": 0.0105, "step": 11757 }, { "epoch": 21.694367497691598, "grad_norm": 0.39428460597991943, "learning_rate": 2.9415000000000002e-05, "loss": 0.0236, "step": 11758 }, { "epoch": 21.696214219759927, "grad_norm": 0.3154822289943695, "learning_rate": 2.9414666666666668e-05, "loss": 0.0122, "step": 11759 }, 
{ "epoch": 21.698060941828256, "grad_norm": 0.3474328815937042, "learning_rate": 2.9414333333333334e-05, "loss": 0.0129, "step": 11760 }, { "epoch": 21.69990766389658, "grad_norm": 0.09453830868005753, "learning_rate": 2.9414000000000003e-05, "loss": 0.0029, "step": 11761 }, { "epoch": 21.70175438596491, "grad_norm": 0.32519862055778503, "learning_rate": 2.9413666666666666e-05, "loss": 0.0547, "step": 11762 }, { "epoch": 21.70360110803324, "grad_norm": 0.2847834825515747, "learning_rate": 2.9413333333333335e-05, "loss": 0.0051, "step": 11763 }, { "epoch": 21.70544783010157, "grad_norm": 0.4314049184322357, "learning_rate": 2.9413e-05, "loss": 0.0189, "step": 11764 }, { "epoch": 21.7072945521699, "grad_norm": 0.1875690221786499, "learning_rate": 2.9412666666666667e-05, "loss": 0.004, "step": 11765 }, { "epoch": 21.709141274238227, "grad_norm": 0.118055060505867, "learning_rate": 2.9412333333333333e-05, "loss": 0.0031, "step": 11766 }, { "epoch": 21.710987996306557, "grad_norm": 0.11226514726877213, "learning_rate": 2.9412000000000002e-05, "loss": 0.0041, "step": 11767 }, { "epoch": 21.712834718374886, "grad_norm": 0.3839562237262726, "learning_rate": 2.9411666666666668e-05, "loss": 0.0067, "step": 11768 }, { "epoch": 21.714681440443215, "grad_norm": 0.06365108489990234, "learning_rate": 2.9411333333333334e-05, "loss": 0.002, "step": 11769 }, { "epoch": 21.71652816251154, "grad_norm": 0.3811405301094055, "learning_rate": 2.9411000000000004e-05, "loss": 0.0096, "step": 11770 }, { "epoch": 21.71837488457987, "grad_norm": 0.4699449837207794, "learning_rate": 2.9410666666666666e-05, "loss": 0.0139, "step": 11771 }, { "epoch": 21.7202216066482, "grad_norm": 0.26368317008018494, "learning_rate": 2.9410333333333335e-05, "loss": 0.0096, "step": 11772 }, { "epoch": 21.722068328716528, "grad_norm": 0.14234060049057007, "learning_rate": 2.9409999999999998e-05, "loss": 0.003, "step": 11773 }, { "epoch": 21.723915050784857, "grad_norm": 0.6422532200813293, "learning_rate": 
2.9409666666666667e-05, "loss": 0.0093, "step": 11774 }, { "epoch": 21.725761772853186, "grad_norm": 0.22176691889762878, "learning_rate": 2.9409333333333333e-05, "loss": 0.0046, "step": 11775 }, { "epoch": 21.727608494921515, "grad_norm": 0.30540087819099426, "learning_rate": 2.9409e-05, "loss": 0.0061, "step": 11776 }, { "epoch": 21.729455216989845, "grad_norm": 0.5490514039993286, "learning_rate": 2.940866666666667e-05, "loss": 0.0117, "step": 11777 }, { "epoch": 21.73130193905817, "grad_norm": 0.3425533175468445, "learning_rate": 2.9408333333333334e-05, "loss": 0.0123, "step": 11778 }, { "epoch": 21.7331486611265, "grad_norm": 0.2820558249950409, "learning_rate": 2.9408e-05, "loss": 0.0091, "step": 11779 }, { "epoch": 21.73499538319483, "grad_norm": 0.20925366878509521, "learning_rate": 2.9407666666666666e-05, "loss": 0.0083, "step": 11780 }, { "epoch": 21.736842105263158, "grad_norm": 0.22122719883918762, "learning_rate": 2.9407333333333336e-05, "loss": 0.0082, "step": 11781 }, { "epoch": 21.738688827331487, "grad_norm": 0.36879807710647583, "learning_rate": 2.9406999999999998e-05, "loss": 0.0145, "step": 11782 }, { "epoch": 21.740535549399816, "grad_norm": 0.7262082695960999, "learning_rate": 2.9406666666666667e-05, "loss": 0.1864, "step": 11783 }, { "epoch": 21.742382271468145, "grad_norm": 0.5911471247673035, "learning_rate": 2.9406333333333333e-05, "loss": 0.1308, "step": 11784 }, { "epoch": 21.744228993536474, "grad_norm": 0.5021706819534302, "learning_rate": 2.9406e-05, "loss": 0.1253, "step": 11785 }, { "epoch": 21.746075715604803, "grad_norm": 0.42258352041244507, "learning_rate": 2.940566666666667e-05, "loss": 0.1105, "step": 11786 }, { "epoch": 21.74792243767313, "grad_norm": 0.4874754250049591, "learning_rate": 2.9405333333333335e-05, "loss": 0.0881, "step": 11787 }, { "epoch": 21.749769159741458, "grad_norm": 0.41108840703964233, "learning_rate": 2.9405e-05, "loss": 0.0996, "step": 11788 }, { "epoch": 21.751615881809787, "grad_norm": 
0.4531548321247101, "learning_rate": 2.9404666666666666e-05, "loss": 0.0555, "step": 11789 }, { "epoch": 21.753462603878116, "grad_norm": 0.6766632795333862, "learning_rate": 2.9404333333333336e-05, "loss": 0.055, "step": 11790 }, { "epoch": 21.755309325946445, "grad_norm": 0.36510834097862244, "learning_rate": 2.9404e-05, "loss": 0.0607, "step": 11791 }, { "epoch": 21.757156048014775, "grad_norm": 0.38905462622642517, "learning_rate": 2.9403666666666668e-05, "loss": 0.0764, "step": 11792 }, { "epoch": 21.759002770083104, "grad_norm": 0.33395567536354065, "learning_rate": 2.9403333333333337e-05, "loss": 0.0285, "step": 11793 }, { "epoch": 21.760849492151433, "grad_norm": 0.5528409481048584, "learning_rate": 2.9403e-05, "loss": 0.0547, "step": 11794 }, { "epoch": 21.76269621421976, "grad_norm": 0.3216876685619354, "learning_rate": 2.940266666666667e-05, "loss": 0.017, "step": 11795 }, { "epoch": 21.764542936288088, "grad_norm": 0.33285337686538696, "learning_rate": 2.9402333333333335e-05, "loss": 0.0276, "step": 11796 }, { "epoch": 21.766389658356417, "grad_norm": 0.33139219880104065, "learning_rate": 2.9402e-05, "loss": 0.0302, "step": 11797 }, { "epoch": 21.768236380424746, "grad_norm": 0.19674275815486908, "learning_rate": 2.9401666666666667e-05, "loss": 0.0115, "step": 11798 }, { "epoch": 21.770083102493075, "grad_norm": 0.3641238510608673, "learning_rate": 2.9401333333333336e-05, "loss": 0.0119, "step": 11799 }, { "epoch": 21.771929824561404, "grad_norm": 0.24481379985809326, "learning_rate": 2.9401e-05, "loss": 0.0083, "step": 11800 }, { "epoch": 21.773776546629733, "grad_norm": 0.3730131685733795, "learning_rate": 2.9400666666666668e-05, "loss": 0.041, "step": 11801 }, { "epoch": 21.775623268698062, "grad_norm": 0.24020326137542725, "learning_rate": 2.9400333333333337e-05, "loss": 0.0117, "step": 11802 }, { "epoch": 21.777469990766388, "grad_norm": 0.16403193771839142, "learning_rate": 2.94e-05, "loss": 0.0082, "step": 11803 }, { "epoch": 21.779316712834717, 
"grad_norm": 0.19418635964393616, "learning_rate": 2.939966666666667e-05, "loss": 0.0092, "step": 11804 }, { "epoch": 21.781163434903046, "grad_norm": 0.2227737009525299, "learning_rate": 2.939933333333333e-05, "loss": 0.0083, "step": 11805 }, { "epoch": 21.783010156971375, "grad_norm": 0.2907804250717163, "learning_rate": 2.9399e-05, "loss": 0.0072, "step": 11806 }, { "epoch": 21.784856879039705, "grad_norm": 0.24682292342185974, "learning_rate": 2.9398666666666667e-05, "loss": 0.0126, "step": 11807 }, { "epoch": 21.786703601108034, "grad_norm": 0.31712350249290466, "learning_rate": 2.9398333333333333e-05, "loss": 0.0087, "step": 11808 }, { "epoch": 21.788550323176363, "grad_norm": 0.7004305720329285, "learning_rate": 2.9398000000000002e-05, "loss": 0.0182, "step": 11809 }, { "epoch": 21.790397045244692, "grad_norm": 0.204494446516037, "learning_rate": 2.9397666666666668e-05, "loss": 0.0048, "step": 11810 }, { "epoch": 21.792243767313018, "grad_norm": 0.3658459782600403, "learning_rate": 2.9397333333333334e-05, "loss": 0.0273, "step": 11811 }, { "epoch": 21.794090489381347, "grad_norm": 0.20716539025306702, "learning_rate": 2.9397e-05, "loss": 0.0075, "step": 11812 }, { "epoch": 21.795937211449676, "grad_norm": 0.1901579350233078, "learning_rate": 2.939666666666667e-05, "loss": 0.0103, "step": 11813 }, { "epoch": 21.797783933518005, "grad_norm": 0.07081008702516556, "learning_rate": 2.9396333333333332e-05, "loss": 0.0021, "step": 11814 }, { "epoch": 21.799630655586334, "grad_norm": 0.7699052095413208, "learning_rate": 2.9396e-05, "loss": 0.0122, "step": 11815 }, { "epoch": 21.801477377654663, "grad_norm": 0.12213122844696045, "learning_rate": 2.9395666666666667e-05, "loss": 0.0033, "step": 11816 }, { "epoch": 21.803324099722992, "grad_norm": 0.3606746792793274, "learning_rate": 2.9395333333333333e-05, "loss": 0.0054, "step": 11817 }, { "epoch": 21.80517082179132, "grad_norm": 0.24104273319244385, "learning_rate": 2.9395000000000002e-05, "loss": 0.0064, "step": 
11818 }, { "epoch": 21.80701754385965, "grad_norm": 0.2115449160337448, "learning_rate": 2.9394666666666668e-05, "loss": 0.029, "step": 11819 }, { "epoch": 21.808864265927976, "grad_norm": 0.8519856929779053, "learning_rate": 2.9394333333333334e-05, "loss": 0.0106, "step": 11820 }, { "epoch": 21.810710987996305, "grad_norm": 0.44638770818710327, "learning_rate": 2.9394e-05, "loss": 0.0109, "step": 11821 }, { "epoch": 21.812557710064635, "grad_norm": 0.33324283361434937, "learning_rate": 2.939366666666667e-05, "loss": 0.0129, "step": 11822 }, { "epoch": 21.814404432132964, "grad_norm": 0.10796014219522476, "learning_rate": 2.9393333333333332e-05, "loss": 0.0025, "step": 11823 }, { "epoch": 21.816251154201293, "grad_norm": 0.3488299250602722, "learning_rate": 2.9393e-05, "loss": 0.0109, "step": 11824 }, { "epoch": 21.818097876269622, "grad_norm": 0.17116694152355194, "learning_rate": 2.9392666666666667e-05, "loss": 0.0043, "step": 11825 }, { "epoch": 21.81994459833795, "grad_norm": 0.44072869420051575, "learning_rate": 2.9392333333333333e-05, "loss": 0.0129, "step": 11826 }, { "epoch": 21.82179132040628, "grad_norm": 0.2730376422405243, "learning_rate": 2.9392000000000003e-05, "loss": 0.0067, "step": 11827 }, { "epoch": 21.823638042474606, "grad_norm": 0.5067234635353088, "learning_rate": 2.939166666666667e-05, "loss": 0.0134, "step": 11828 }, { "epoch": 21.825484764542935, "grad_norm": 0.21032480895519257, "learning_rate": 2.9391333333333334e-05, "loss": 0.007, "step": 11829 }, { "epoch": 21.827331486611264, "grad_norm": 0.5482940077781677, "learning_rate": 2.9391e-05, "loss": 0.0253, "step": 11830 }, { "epoch": 21.829178208679593, "grad_norm": 1.3305041790008545, "learning_rate": 2.939066666666667e-05, "loss": 0.0256, "step": 11831 }, { "epoch": 21.831024930747922, "grad_norm": 0.6527359485626221, "learning_rate": 2.9390333333333332e-05, "loss": 0.0083, "step": 11832 }, { "epoch": 21.83287165281625, "grad_norm": 0.6475838422775269, "learning_rate": 2.939e-05, 
"loss": 0.202, "step": 11833 }, { "epoch": 21.83471837488458, "grad_norm": 0.5826760530471802, "learning_rate": 2.9389666666666667e-05, "loss": 0.1203, "step": 11834 }, { "epoch": 21.83656509695291, "grad_norm": 0.5271478295326233, "learning_rate": 2.9389333333333333e-05, "loss": 0.0983, "step": 11835 }, { "epoch": 21.83841181902124, "grad_norm": 0.4728860557079315, "learning_rate": 2.9389000000000003e-05, "loss": 0.0835, "step": 11836 }, { "epoch": 21.840258541089565, "grad_norm": 0.4163111746311188, "learning_rate": 2.9388666666666665e-05, "loss": 0.0544, "step": 11837 }, { "epoch": 21.842105263157894, "grad_norm": 0.8589546084403992, "learning_rate": 2.9388333333333335e-05, "loss": 0.056, "step": 11838 }, { "epoch": 21.843951985226223, "grad_norm": 0.3603813350200653, "learning_rate": 2.9388e-05, "loss": 0.0418, "step": 11839 }, { "epoch": 21.845798707294552, "grad_norm": 0.4066253900527954, "learning_rate": 2.9387666666666666e-05, "loss": 0.0759, "step": 11840 }, { "epoch": 21.84764542936288, "grad_norm": 0.35721030831336975, "learning_rate": 2.9387333333333332e-05, "loss": 0.0442, "step": 11841 }, { "epoch": 21.84949215143121, "grad_norm": 0.36084261536598206, "learning_rate": 2.9387000000000002e-05, "loss": 0.0407, "step": 11842 }, { "epoch": 21.85133887349954, "grad_norm": 0.7102795839309692, "learning_rate": 2.9386666666666668e-05, "loss": 0.0429, "step": 11843 }, { "epoch": 21.85318559556787, "grad_norm": 0.29509997367858887, "learning_rate": 2.9386333333333334e-05, "loss": 0.0271, "step": 11844 }, { "epoch": 21.855032317636194, "grad_norm": 0.4171082079410553, "learning_rate": 2.9386000000000003e-05, "loss": 0.0446, "step": 11845 }, { "epoch": 21.856879039704523, "grad_norm": 0.30098018050193787, "learning_rate": 2.9385666666666665e-05, "loss": 0.0382, "step": 11846 }, { "epoch": 21.858725761772853, "grad_norm": 0.3387888967990875, "learning_rate": 2.9385333333333335e-05, "loss": 0.0571, "step": 11847 }, { "epoch": 21.86057248384118, "grad_norm": 
0.25202545523643494, "learning_rate": 2.9385e-05, "loss": 0.0177, "step": 11848 }, { "epoch": 21.86241920590951, "grad_norm": 0.3939182758331299, "learning_rate": 2.9384666666666667e-05, "loss": 0.0096, "step": 11849 }, { "epoch": 21.86426592797784, "grad_norm": 0.16526532173156738, "learning_rate": 2.9384333333333333e-05, "loss": 0.0074, "step": 11850 }, { "epoch": 21.86611265004617, "grad_norm": 0.3572503626346588, "learning_rate": 2.9384000000000002e-05, "loss": 0.0159, "step": 11851 }, { "epoch": 21.8679593721145, "grad_norm": 0.4337249994277954, "learning_rate": 2.9383666666666668e-05, "loss": 0.041, "step": 11852 }, { "epoch": 21.869806094182824, "grad_norm": 0.26018592715263367, "learning_rate": 2.9383333333333334e-05, "loss": 0.0259, "step": 11853 }, { "epoch": 21.871652816251153, "grad_norm": 0.2274974286556244, "learning_rate": 2.9383000000000003e-05, "loss": 0.012, "step": 11854 }, { "epoch": 21.873499538319482, "grad_norm": 0.4401700496673584, "learning_rate": 2.9382666666666666e-05, "loss": 0.0077, "step": 11855 }, { "epoch": 21.87534626038781, "grad_norm": 0.4011286795139313, "learning_rate": 2.9382333333333335e-05, "loss": 0.0135, "step": 11856 }, { "epoch": 21.87719298245614, "grad_norm": 0.1697901487350464, "learning_rate": 2.9382e-05, "loss": 0.007, "step": 11857 }, { "epoch": 21.87903970452447, "grad_norm": 0.5393462181091309, "learning_rate": 2.9381666666666667e-05, "loss": 0.0145, "step": 11858 }, { "epoch": 21.8808864265928, "grad_norm": 0.2966288626194, "learning_rate": 2.9381333333333336e-05, "loss": 0.008, "step": 11859 }, { "epoch": 21.882733148661128, "grad_norm": 0.35856500267982483, "learning_rate": 2.9381000000000002e-05, "loss": 0.0425, "step": 11860 }, { "epoch": 21.884579870729453, "grad_norm": 0.23906317353248596, "learning_rate": 2.9380666666666668e-05, "loss": 0.0113, "step": 11861 }, { "epoch": 21.886426592797783, "grad_norm": 0.2855496108531952, "learning_rate": 2.9380333333333334e-05, "loss": 0.0099, "step": 11862 }, { 
"epoch": 21.88827331486611, "grad_norm": 0.5273758769035339, "learning_rate": 2.938e-05, "loss": 0.0099, "step": 11863 }, { "epoch": 21.89012003693444, "grad_norm": 0.2559138536453247, "learning_rate": 2.9379666666666666e-05, "loss": 0.0084, "step": 11864 }, { "epoch": 21.89196675900277, "grad_norm": 0.31710177659988403, "learning_rate": 2.9379333333333335e-05, "loss": 0.0111, "step": 11865 }, { "epoch": 21.8938134810711, "grad_norm": 0.32185274362564087, "learning_rate": 2.9378999999999998e-05, "loss": 0.0124, "step": 11866 }, { "epoch": 21.89566020313943, "grad_norm": 0.21019087731838226, "learning_rate": 2.9378666666666667e-05, "loss": 0.0072, "step": 11867 }, { "epoch": 21.897506925207757, "grad_norm": 0.25909486413002014, "learning_rate": 2.9378333333333336e-05, "loss": 0.0059, "step": 11868 }, { "epoch": 21.899353647276087, "grad_norm": 0.3391138017177582, "learning_rate": 2.9378e-05, "loss": 0.0408, "step": 11869 }, { "epoch": 21.901200369344412, "grad_norm": 0.4207199215888977, "learning_rate": 2.9377666666666668e-05, "loss": 0.0093, "step": 11870 }, { "epoch": 21.90304709141274, "grad_norm": 0.4389612078666687, "learning_rate": 2.9377333333333334e-05, "loss": 0.0103, "step": 11871 }, { "epoch": 21.90489381348107, "grad_norm": 0.4361323416233063, "learning_rate": 2.9377e-05, "loss": 0.0186, "step": 11872 }, { "epoch": 21.9067405355494, "grad_norm": 0.2915911078453064, "learning_rate": 2.9376666666666666e-05, "loss": 0.008, "step": 11873 }, { "epoch": 21.90858725761773, "grad_norm": 0.43618321418762207, "learning_rate": 2.9376333333333335e-05, "loss": 0.0159, "step": 11874 }, { "epoch": 21.910433979686058, "grad_norm": 0.2591547966003418, "learning_rate": 2.9375999999999998e-05, "loss": 0.0112, "step": 11875 }, { "epoch": 21.912280701754387, "grad_norm": 0.1294482797384262, "learning_rate": 2.9375666666666667e-05, "loss": 0.0045, "step": 11876 }, { "epoch": 21.914127423822716, "grad_norm": 0.23369941115379333, "learning_rate": 2.9375333333333337e-05, "loss": 
0.0107, "step": 11877 }, { "epoch": 21.91597414589104, "grad_norm": 0.2956857979297638, "learning_rate": 2.9375e-05, "loss": 0.0092, "step": 11878 }, { "epoch": 21.91782086795937, "grad_norm": 0.21449799835681915, "learning_rate": 2.937466666666667e-05, "loss": 0.0053, "step": 11879 }, { "epoch": 21.9196675900277, "grad_norm": 0.2647559344768524, "learning_rate": 2.9374333333333334e-05, "loss": 0.0087, "step": 11880 }, { "epoch": 21.92151431209603, "grad_norm": 0.16165274381637573, "learning_rate": 2.9374e-05, "loss": 0.0032, "step": 11881 }, { "epoch": 21.92336103416436, "grad_norm": 0.5226666331291199, "learning_rate": 2.9373666666666666e-05, "loss": 0.0183, "step": 11882 }, { "epoch": 21.925207756232687, "grad_norm": 0.6835513114929199, "learning_rate": 2.9373333333333336e-05, "loss": 0.1625, "step": 11883 }, { "epoch": 21.927054478301017, "grad_norm": 0.7763351202011108, "learning_rate": 2.9373e-05, "loss": 0.1565, "step": 11884 }, { "epoch": 21.928901200369346, "grad_norm": 0.43676137924194336, "learning_rate": 2.9372666666666667e-05, "loss": 0.0685, "step": 11885 }, { "epoch": 21.930747922437675, "grad_norm": 0.47941815853118896, "learning_rate": 2.9372333333333337e-05, "loss": 0.0891, "step": 11886 }, { "epoch": 21.932594644506, "grad_norm": 0.4046361744403839, "learning_rate": 2.9372e-05, "loss": 0.0809, "step": 11887 }, { "epoch": 21.93444136657433, "grad_norm": 0.39610689878463745, "learning_rate": 2.937166666666667e-05, "loss": 0.0526, "step": 11888 }, { "epoch": 21.93628808864266, "grad_norm": 0.6374268531799316, "learning_rate": 2.9371333333333335e-05, "loss": 0.0754, "step": 11889 }, { "epoch": 21.938134810710988, "grad_norm": 0.3975745439529419, "learning_rate": 2.9371e-05, "loss": 0.0746, "step": 11890 }, { "epoch": 21.939981532779317, "grad_norm": 0.5438203811645508, "learning_rate": 2.9370666666666666e-05, "loss": 0.0442, "step": 11891 }, { "epoch": 21.941828254847646, "grad_norm": 0.883966326713562, "learning_rate": 2.9370333333333336e-05, 
"loss": 0.0496, "step": 11892 }, { "epoch": 21.943674976915975, "grad_norm": 0.3531246781349182, "learning_rate": 2.9370000000000002e-05, "loss": 0.0282, "step": 11893 }, { "epoch": 21.945521698984304, "grad_norm": 0.2855888307094574, "learning_rate": 2.9369666666666668e-05, "loss": 0.0433, "step": 11894 }, { "epoch": 21.94736842105263, "grad_norm": 0.3364258110523224, "learning_rate": 2.9369333333333334e-05, "loss": 0.0136, "step": 11895 }, { "epoch": 21.94921514312096, "grad_norm": 0.1887637972831726, "learning_rate": 2.9369e-05, "loss": 0.0134, "step": 11896 }, { "epoch": 21.95106186518929, "grad_norm": 0.4127003252506256, "learning_rate": 2.936866666666667e-05, "loss": 0.0344, "step": 11897 }, { "epoch": 21.952908587257618, "grad_norm": 0.22459253668785095, "learning_rate": 2.936833333333333e-05, "loss": 0.0133, "step": 11898 }, { "epoch": 21.954755309325947, "grad_norm": 0.33327922224998474, "learning_rate": 2.9368e-05, "loss": 0.0099, "step": 11899 }, { "epoch": 21.956602031394276, "grad_norm": 0.26645731925964355, "learning_rate": 2.9367666666666667e-05, "loss": 0.0152, "step": 11900 }, { "epoch": 21.958448753462605, "grad_norm": 0.34843745827674866, "learning_rate": 2.9367333333333333e-05, "loss": 0.0189, "step": 11901 }, { "epoch": 21.960295475530934, "grad_norm": 0.26357918977737427, "learning_rate": 2.9367000000000002e-05, "loss": 0.0073, "step": 11902 }, { "epoch": 21.96214219759926, "grad_norm": 0.31214573979377747, "learning_rate": 2.9366666666666668e-05, "loss": 0.0131, "step": 11903 }, { "epoch": 21.96398891966759, "grad_norm": 0.2717858850955963, "learning_rate": 2.9366333333333334e-05, "loss": 0.0073, "step": 11904 }, { "epoch": 21.965835641735918, "grad_norm": 0.16268011927604675, "learning_rate": 2.9366e-05, "loss": 0.0041, "step": 11905 }, { "epoch": 21.967682363804247, "grad_norm": 0.23915761709213257, "learning_rate": 2.936566666666667e-05, "loss": 0.0089, "step": 11906 }, { "epoch": 21.969529085872576, "grad_norm": 0.1611839383840561, 
"learning_rate": 2.936533333333333e-05, "loss": 0.0058, "step": 11907 }, { "epoch": 21.971375807940905, "grad_norm": 0.6211763024330139, "learning_rate": 2.9365e-05, "loss": 0.0098, "step": 11908 }, { "epoch": 21.973222530009235, "grad_norm": 0.8325933814048767, "learning_rate": 2.936466666666667e-05, "loss": 0.0118, "step": 11909 }, { "epoch": 21.975069252077564, "grad_norm": 0.17792974412441254, "learning_rate": 2.9364333333333333e-05, "loss": 0.0048, "step": 11910 }, { "epoch": 21.97691597414589, "grad_norm": 0.17959211766719818, "learning_rate": 2.9364000000000002e-05, "loss": 0.0082, "step": 11911 }, { "epoch": 21.97876269621422, "grad_norm": 0.23362372815608978, "learning_rate": 2.9363666666666668e-05, "loss": 0.0053, "step": 11912 }, { "epoch": 21.980609418282548, "grad_norm": 0.39807364344596863, "learning_rate": 2.9363333333333334e-05, "loss": 0.0072, "step": 11913 }, { "epoch": 21.982456140350877, "grad_norm": 0.411865770816803, "learning_rate": 2.9363e-05, "loss": 0.0095, "step": 11914 }, { "epoch": 21.984302862419206, "grad_norm": 0.41350361704826355, "learning_rate": 2.936266666666667e-05, "loss": 0.0128, "step": 11915 }, { "epoch": 21.986149584487535, "grad_norm": 0.24316683411598206, "learning_rate": 2.9362333333333332e-05, "loss": 0.0054, "step": 11916 }, { "epoch": 21.987996306555864, "grad_norm": 0.21313484013080597, "learning_rate": 2.9362e-05, "loss": 0.0042, "step": 11917 }, { "epoch": 21.989843028624193, "grad_norm": 0.11858999729156494, "learning_rate": 2.936166666666667e-05, "loss": 0.0046, "step": 11918 }, { "epoch": 21.991689750692522, "grad_norm": 0.13938215374946594, "learning_rate": 2.9361333333333333e-05, "loss": 0.004, "step": 11919 }, { "epoch": 21.993536472760848, "grad_norm": 0.23660410940647125, "learning_rate": 2.9361000000000002e-05, "loss": 0.0073, "step": 11920 }, { "epoch": 21.995383194829177, "grad_norm": 0.29818493127822876, "learning_rate": 2.9360666666666668e-05, "loss": 0.0094, "step": 11921 }, { "epoch": 
21.997229916897506, "grad_norm": 0.2225433886051178, "learning_rate": 2.9360333333333334e-05, "loss": 0.006, "step": 11922 }, { "epoch": 21.999076638965835, "grad_norm": 0.5248429775238037, "learning_rate": 2.936e-05, "loss": 0.0093, "step": 11923 }, { "epoch": 22.0, "grad_norm": 0.4427490532398224, "learning_rate": 2.9359666666666666e-05, "loss": 0.0058, "step": 11924 }, { "epoch": 22.00184672206833, "grad_norm": 0.8038821220397949, "learning_rate": 2.9359333333333332e-05, "loss": 0.2321, "step": 11925 }, { "epoch": 22.00369344413666, "grad_norm": 0.7266762256622314, "learning_rate": 2.9359e-05, "loss": 0.1044, "step": 11926 }, { "epoch": 22.005540166204987, "grad_norm": 0.5080312490463257, "learning_rate": 2.9358666666666667e-05, "loss": 0.1741, "step": 11927 }, { "epoch": 22.007386888273317, "grad_norm": 0.4087110459804535, "learning_rate": 2.9358333333333333e-05, "loss": 0.091, "step": 11928 }, { "epoch": 22.009233610341642, "grad_norm": 0.4159901440143585, "learning_rate": 2.9358000000000003e-05, "loss": 0.0538, "step": 11929 }, { "epoch": 22.01108033240997, "grad_norm": 0.429823637008667, "learning_rate": 2.9357666666666665e-05, "loss": 0.0617, "step": 11930 }, { "epoch": 22.0129270544783, "grad_norm": 0.3844093084335327, "learning_rate": 2.9357333333333334e-05, "loss": 0.0551, "step": 11931 }, { "epoch": 22.01477377654663, "grad_norm": 0.34892767667770386, "learning_rate": 2.9357e-05, "loss": 0.0541, "step": 11932 }, { "epoch": 22.01662049861496, "grad_norm": 0.383372962474823, "learning_rate": 2.9356666666666666e-05, "loss": 0.048, "step": 11933 }, { "epoch": 22.018467220683288, "grad_norm": 0.45354166626930237, "learning_rate": 2.9356333333333336e-05, "loss": 0.0946, "step": 11934 }, { "epoch": 22.020313942751617, "grad_norm": 0.3834838569164276, "learning_rate": 2.9356e-05, "loss": 0.0244, "step": 11935 }, { "epoch": 22.022160664819946, "grad_norm": 0.2780103385448456, "learning_rate": 2.9355666666666667e-05, "loss": 0.0156, "step": 11936 }, { "epoch": 
22.02400738688827, "grad_norm": 0.29520273208618164, "learning_rate": 2.9355333333333333e-05, "loss": 0.0245, "step": 11937 }, { "epoch": 22.0258541089566, "grad_norm": 0.4779583215713501, "learning_rate": 2.9355000000000003e-05, "loss": 0.0202, "step": 11938 }, { "epoch": 22.02770083102493, "grad_norm": 0.22411291301250458, "learning_rate": 2.9354666666666665e-05, "loss": 0.01, "step": 11939 }, { "epoch": 22.02954755309326, "grad_norm": 0.5006150007247925, "learning_rate": 2.9354333333333335e-05, "loss": 0.0118, "step": 11940 }, { "epoch": 22.03139427516159, "grad_norm": 0.6376142501831055, "learning_rate": 2.9354e-05, "loss": 0.0134, "step": 11941 }, { "epoch": 22.033240997229917, "grad_norm": 0.2224431335926056, "learning_rate": 2.9353666666666666e-05, "loss": 0.0117, "step": 11942 }, { "epoch": 22.035087719298247, "grad_norm": 0.24049821496009827, "learning_rate": 2.9353333333333336e-05, "loss": 0.0058, "step": 11943 }, { "epoch": 22.036934441366576, "grad_norm": 0.6087546348571777, "learning_rate": 2.9353000000000002e-05, "loss": 0.0123, "step": 11944 }, { "epoch": 22.0387811634349, "grad_norm": 0.3148014545440674, "learning_rate": 2.9352666666666668e-05, "loss": 0.0105, "step": 11945 }, { "epoch": 22.04062788550323, "grad_norm": 0.3352877199649811, "learning_rate": 2.9352333333333334e-05, "loss": 0.017, "step": 11946 }, { "epoch": 22.04247460757156, "grad_norm": 0.1324327439069748, "learning_rate": 2.9352000000000003e-05, "loss": 0.0046, "step": 11947 }, { "epoch": 22.04432132963989, "grad_norm": 0.40338218212127686, "learning_rate": 2.9351666666666665e-05, "loss": 0.0093, "step": 11948 }, { "epoch": 22.046168051708218, "grad_norm": 0.32593637704849243, "learning_rate": 2.9351333333333335e-05, "loss": 0.0119, "step": 11949 }, { "epoch": 22.048014773776547, "grad_norm": 0.0924089178442955, "learning_rate": 2.9351e-05, "loss": 0.0031, "step": 11950 }, { "epoch": 22.049861495844876, "grad_norm": 0.3839172124862671, "learning_rate": 2.9350666666666667e-05, 
"loss": 0.0126, "step": 11951 }, { "epoch": 22.051708217913205, "grad_norm": 0.1773180216550827, "learning_rate": 2.9350333333333336e-05, "loss": 0.004, "step": 11952 }, { "epoch": 22.053554939981534, "grad_norm": 0.23860660195350647, "learning_rate": 2.9350000000000002e-05, "loss": 0.0068, "step": 11953 }, { "epoch": 22.05540166204986, "grad_norm": 0.3832785487174988, "learning_rate": 2.9349666666666668e-05, "loss": 0.0615, "step": 11954 }, { "epoch": 22.05724838411819, "grad_norm": 1.8486205339431763, "learning_rate": 2.9349333333333334e-05, "loss": 0.0121, "step": 11955 }, { "epoch": 22.05909510618652, "grad_norm": 0.26004332304000854, "learning_rate": 2.9349e-05, "loss": 0.0065, "step": 11956 }, { "epoch": 22.060941828254848, "grad_norm": 0.35053035616874695, "learning_rate": 2.9348666666666666e-05, "loss": 0.0068, "step": 11957 }, { "epoch": 22.062788550323177, "grad_norm": 0.14764510095119476, "learning_rate": 2.9348333333333335e-05, "loss": 0.006, "step": 11958 }, { "epoch": 22.064635272391506, "grad_norm": 0.3242139220237732, "learning_rate": 2.9348e-05, "loss": 0.0076, "step": 11959 }, { "epoch": 22.066481994459835, "grad_norm": 0.2089972198009491, "learning_rate": 2.9347666666666667e-05, "loss": 0.0058, "step": 11960 }, { "epoch": 22.068328716528164, "grad_norm": 0.7421181797981262, "learning_rate": 2.9347333333333336e-05, "loss": 0.009, "step": 11961 }, { "epoch": 22.07017543859649, "grad_norm": 0.11591432243585587, "learning_rate": 2.9347e-05, "loss": 0.0026, "step": 11962 }, { "epoch": 22.07202216066482, "grad_norm": 0.4323197901248932, "learning_rate": 2.9346666666666668e-05, "loss": 0.0148, "step": 11963 }, { "epoch": 22.073868882733148, "grad_norm": 0.3520541489124298, "learning_rate": 2.9346333333333334e-05, "loss": 0.0121, "step": 11964 }, { "epoch": 22.075715604801477, "grad_norm": 0.3193742334842682, "learning_rate": 2.9346e-05, "loss": 0.0059, "step": 11965 }, { "epoch": 22.077562326869806, "grad_norm": 0.21015574038028717, "learning_rate": 
2.9345666666666666e-05, "loss": 0.0057, "step": 11966 }, { "epoch": 22.079409048938135, "grad_norm": 1.1927063465118408, "learning_rate": 2.9345333333333335e-05, "loss": 0.0139, "step": 11967 }, { "epoch": 22.081255771006465, "grad_norm": 0.2794034481048584, "learning_rate": 2.9345e-05, "loss": 0.0066, "step": 11968 }, { "epoch": 22.083102493074794, "grad_norm": 0.16369950771331787, "learning_rate": 2.9344666666666667e-05, "loss": 0.0024, "step": 11969 }, { "epoch": 22.08494921514312, "grad_norm": 0.6939327716827393, "learning_rate": 2.9344333333333336e-05, "loss": 0.0132, "step": 11970 }, { "epoch": 22.08679593721145, "grad_norm": 0.5204373002052307, "learning_rate": 2.9344e-05, "loss": 0.0091, "step": 11971 }, { "epoch": 22.088642659279778, "grad_norm": 0.6986927390098572, "learning_rate": 2.9343666666666668e-05, "loss": 0.0167, "step": 11972 }, { "epoch": 22.090489381348107, "grad_norm": 0.2969571053981781, "learning_rate": 2.9343333333333334e-05, "loss": 0.0136, "step": 11973 }, { "epoch": 22.092336103416436, "grad_norm": 0.49085596203804016, "learning_rate": 2.9343e-05, "loss": 0.0067, "step": 11974 }, { "epoch": 22.094182825484765, "grad_norm": 0.610810399055481, "learning_rate": 2.9342666666666666e-05, "loss": 0.1319, "step": 11975 }, { "epoch": 22.096029547553094, "grad_norm": 0.5385555028915405, "learning_rate": 2.9342333333333335e-05, "loss": 0.1277, "step": 11976 }, { "epoch": 22.097876269621423, "grad_norm": 0.4245043098926544, "learning_rate": 2.9342e-05, "loss": 0.0807, "step": 11977 }, { "epoch": 22.099722991689752, "grad_norm": 0.5050533413887024, "learning_rate": 2.9341666666666667e-05, "loss": 0.0656, "step": 11978 }, { "epoch": 22.101569713758078, "grad_norm": 0.5482106804847717, "learning_rate": 2.9341333333333337e-05, "loss": 0.0574, "step": 11979 }, { "epoch": 22.103416435826407, "grad_norm": 0.4365978538990021, "learning_rate": 2.9341e-05, "loss": 0.0619, "step": 11980 }, { "epoch": 22.105263157894736, "grad_norm": 0.4204999506473541, 
"learning_rate": 2.934066666666667e-05, "loss": 0.0534, "step": 11981 }, { "epoch": 22.107109879963065, "grad_norm": 0.44639596343040466, "learning_rate": 2.9340333333333334e-05, "loss": 0.0629, "step": 11982 }, { "epoch": 22.108956602031395, "grad_norm": 0.37912020087242126, "learning_rate": 2.934e-05, "loss": 0.0423, "step": 11983 }, { "epoch": 22.110803324099724, "grad_norm": 0.4707135260105133, "learning_rate": 2.933966666666667e-05, "loss": 0.0384, "step": 11984 }, { "epoch": 22.112650046168053, "grad_norm": 0.3049370348453522, "learning_rate": 2.9339333333333332e-05, "loss": 0.0268, "step": 11985 }, { "epoch": 22.114496768236382, "grad_norm": 0.5002748370170593, "learning_rate": 2.9339e-05, "loss": 0.0265, "step": 11986 }, { "epoch": 22.116343490304708, "grad_norm": 0.2719067633152008, "learning_rate": 2.9338666666666667e-05, "loss": 0.0373, "step": 11987 }, { "epoch": 22.118190212373037, "grad_norm": 0.4024271070957184, "learning_rate": 2.9338333333333333e-05, "loss": 0.034, "step": 11988 }, { "epoch": 22.120036934441366, "grad_norm": 0.7035671472549438, "learning_rate": 2.9338e-05, "loss": 0.0112, "step": 11989 }, { "epoch": 22.121883656509695, "grad_norm": 0.9298180937767029, "learning_rate": 2.933766666666667e-05, "loss": 0.0209, "step": 11990 }, { "epoch": 22.123730378578024, "grad_norm": 0.27738744020462036, "learning_rate": 2.933733333333333e-05, "loss": 0.0384, "step": 11991 }, { "epoch": 22.125577100646353, "grad_norm": 0.554492712020874, "learning_rate": 2.9337e-05, "loss": 0.016, "step": 11992 }, { "epoch": 22.127423822714682, "grad_norm": 0.19704753160476685, "learning_rate": 2.933666666666667e-05, "loss": 0.0064, "step": 11993 }, { "epoch": 22.12927054478301, "grad_norm": 0.24221715331077576, "learning_rate": 2.9336333333333332e-05, "loss": 0.0455, "step": 11994 }, { "epoch": 22.131117266851337, "grad_norm": 0.25139182806015015, "learning_rate": 2.9336000000000002e-05, "loss": 0.0084, "step": 11995 }, { "epoch": 22.132963988919666, "grad_norm": 
0.18211950361728668, "learning_rate": 2.9335666666666668e-05, "loss": 0.0073, "step": 11996 }, { "epoch": 22.134810710987995, "grad_norm": 0.2991064786911011, "learning_rate": 2.9335333333333334e-05, "loss": 0.0063, "step": 11997 }, { "epoch": 22.136657433056325, "grad_norm": 0.1840602308511734, "learning_rate": 2.9335e-05, "loss": 0.0088, "step": 11998 }, { "epoch": 22.138504155124654, "grad_norm": 0.25436100363731384, "learning_rate": 2.933466666666667e-05, "loss": 0.005, "step": 11999 }, { "epoch": 22.140350877192983, "grad_norm": 0.16578309237957, "learning_rate": 2.933433333333333e-05, "loss": 0.0057, "step": 12000 }, { "epoch": 22.140350877192983, "eval_cer": 0.11143945874484797, "eval_loss": 0.3457863926887512, "eval_runtime": 16.4823, "eval_samples_per_second": 18.444, "eval_steps_per_second": 0.607, "eval_wer": 0.38468917881811204, "step": 12000 }, { "epoch": 22.142197599261312, "grad_norm": 0.1465679258108139, "learning_rate": 2.9334e-05, "loss": 0.0074, "step": 12001 }, { "epoch": 22.14404432132964, "grad_norm": 0.35484927892684937, "learning_rate": 2.933366666666667e-05, "loss": 0.0123, "step": 12002 }, { "epoch": 22.14589104339797, "grad_norm": 0.22260144352912903, "learning_rate": 2.9333333333333333e-05, "loss": 0.0095, "step": 12003 }, { "epoch": 22.147737765466296, "grad_norm": 0.13370046019554138, "learning_rate": 2.9333000000000002e-05, "loss": 0.0053, "step": 12004 }, { "epoch": 22.149584487534625, "grad_norm": 0.585996150970459, "learning_rate": 2.9332666666666668e-05, "loss": 0.007, "step": 12005 }, { "epoch": 22.151431209602954, "grad_norm": 0.16085104644298553, "learning_rate": 2.9332333333333334e-05, "loss": 0.0037, "step": 12006 }, { "epoch": 22.153277931671283, "grad_norm": 0.2741457223892212, "learning_rate": 2.9332e-05, "loss": 0.0094, "step": 12007 }, { "epoch": 22.155124653739612, "grad_norm": 0.35082828998565674, "learning_rate": 2.933166666666667e-05, "loss": 0.0087, "step": 12008 }, { "epoch": 22.15697137580794, "grad_norm": 
0.7393125295639038, "learning_rate": 2.9331333333333335e-05, "loss": 0.0208, "step": 12009 }, { "epoch": 22.15881809787627, "grad_norm": 0.37203606963157654, "learning_rate": 2.9331e-05, "loss": 0.0075, "step": 12010 }, { "epoch": 22.1606648199446, "grad_norm": 1.1032886505126953, "learning_rate": 2.933066666666667e-05, "loss": 0.0181, "step": 12011 }, { "epoch": 22.162511542012926, "grad_norm": 0.24382264912128448, "learning_rate": 2.9330333333333333e-05, "loss": 0.0086, "step": 12012 }, { "epoch": 22.164358264081255, "grad_norm": 0.20161493122577667, "learning_rate": 2.9330000000000002e-05, "loss": 0.0069, "step": 12013 }, { "epoch": 22.166204986149584, "grad_norm": 0.6993123292922974, "learning_rate": 2.9329666666666668e-05, "loss": 0.0114, "step": 12014 }, { "epoch": 22.168051708217913, "grad_norm": 0.6505698561668396, "learning_rate": 2.9329333333333334e-05, "loss": 0.0132, "step": 12015 }, { "epoch": 22.169898430286242, "grad_norm": 0.21036770939826965, "learning_rate": 2.9329e-05, "loss": 0.0067, "step": 12016 }, { "epoch": 22.17174515235457, "grad_norm": 0.3262076675891876, "learning_rate": 2.9328666666666666e-05, "loss": 0.0095, "step": 12017 }, { "epoch": 22.1735918744229, "grad_norm": 0.42080506682395935, "learning_rate": 2.9328333333333335e-05, "loss": 0.0061, "step": 12018 }, { "epoch": 22.17543859649123, "grad_norm": 0.38495534658432007, "learning_rate": 2.9328e-05, "loss": 0.0161, "step": 12019 }, { "epoch": 22.177285318559555, "grad_norm": 0.31718382239341736, "learning_rate": 2.9327666666666667e-05, "loss": 0.0072, "step": 12020 }, { "epoch": 22.179132040627884, "grad_norm": 0.19104288518428802, "learning_rate": 2.9327333333333333e-05, "loss": 0.0053, "step": 12021 }, { "epoch": 22.180978762696213, "grad_norm": 0.38349059224128723, "learning_rate": 2.9327000000000002e-05, "loss": 0.0101, "step": 12022 }, { "epoch": 22.182825484764543, "grad_norm": 0.9522183537483215, "learning_rate": 2.9326666666666665e-05, "loss": 0.0163, "step": 12023 }, { 
"epoch": 22.18467220683287, "grad_norm": 0.3559163510799408, "learning_rate": 2.9326333333333334e-05, "loss": 0.0066, "step": 12024 }, { "epoch": 22.1865189289012, "grad_norm": 0.6288875341415405, "learning_rate": 2.9326e-05, "loss": 0.1693, "step": 12025 }, { "epoch": 22.18836565096953, "grad_norm": 0.6415272951126099, "learning_rate": 2.9325666666666666e-05, "loss": 0.1282, "step": 12026 }, { "epoch": 22.19021237303786, "grad_norm": 0.4664081931114197, "learning_rate": 2.9325333333333335e-05, "loss": 0.092, "step": 12027 }, { "epoch": 22.19205909510619, "grad_norm": 0.8116239309310913, "learning_rate": 2.9325e-05, "loss": 0.1315, "step": 12028 }, { "epoch": 22.193905817174514, "grad_norm": 0.4563292860984802, "learning_rate": 2.9324666666666667e-05, "loss": 0.0755, "step": 12029 }, { "epoch": 22.195752539242843, "grad_norm": 0.5227322578430176, "learning_rate": 2.9324333333333333e-05, "loss": 0.0758, "step": 12030 }, { "epoch": 22.197599261311172, "grad_norm": 0.438588410615921, "learning_rate": 2.9324000000000002e-05, "loss": 0.0496, "step": 12031 }, { "epoch": 22.1994459833795, "grad_norm": 0.43619734048843384, "learning_rate": 2.9323666666666665e-05, "loss": 0.0529, "step": 12032 }, { "epoch": 22.20129270544783, "grad_norm": 0.2944128215312958, "learning_rate": 2.9323333333333334e-05, "loss": 0.0584, "step": 12033 }, { "epoch": 22.20313942751616, "grad_norm": 0.4487317204475403, "learning_rate": 2.9323000000000004e-05, "loss": 0.0547, "step": 12034 }, { "epoch": 22.20498614958449, "grad_norm": 0.5154827833175659, "learning_rate": 2.9322666666666666e-05, "loss": 0.0434, "step": 12035 }, { "epoch": 22.206832871652818, "grad_norm": 1.1755114793777466, "learning_rate": 2.9322333333333336e-05, "loss": 0.0644, "step": 12036 }, { "epoch": 22.208679593721143, "grad_norm": 0.29100021719932556, "learning_rate": 2.9322e-05, "loss": 0.0423, "step": 12037 }, { "epoch": 22.210526315789473, "grad_norm": 0.4555946886539459, "learning_rate": 2.9321666666666667e-05, "loss": 
0.0637, "step": 12038 }, { "epoch": 22.2123730378578, "grad_norm": 0.49455103278160095, "learning_rate": 2.9321333333333333e-05, "loss": 0.025, "step": 12039 }, { "epoch": 22.21421975992613, "grad_norm": 0.28613951802253723, "learning_rate": 2.9321000000000003e-05, "loss": 0.0136, "step": 12040 }, { "epoch": 22.21606648199446, "grad_norm": 0.2727801501750946, "learning_rate": 2.9320666666666665e-05, "loss": 0.0154, "step": 12041 }, { "epoch": 22.21791320406279, "grad_norm": 0.3077845871448517, "learning_rate": 2.9320333333333335e-05, "loss": 0.0201, "step": 12042 }, { "epoch": 22.21975992613112, "grad_norm": 0.19647696614265442, "learning_rate": 2.9320000000000004e-05, "loss": 0.008, "step": 12043 }, { "epoch": 22.221606648199447, "grad_norm": 0.297075092792511, "learning_rate": 2.9319666666666666e-05, "loss": 0.0082, "step": 12044 }, { "epoch": 22.223453370267773, "grad_norm": 1.275119662284851, "learning_rate": 2.9319333333333336e-05, "loss": 0.0216, "step": 12045 }, { "epoch": 22.225300092336102, "grad_norm": 0.4337138235569, "learning_rate": 2.9318999999999998e-05, "loss": 0.0153, "step": 12046 }, { "epoch": 22.22714681440443, "grad_norm": 0.2728216350078583, "learning_rate": 2.9318666666666668e-05, "loss": 0.0084, "step": 12047 }, { "epoch": 22.22899353647276, "grad_norm": 0.33013466000556946, "learning_rate": 2.9318333333333334e-05, "loss": 0.0157, "step": 12048 }, { "epoch": 22.23084025854109, "grad_norm": 0.19151604175567627, "learning_rate": 2.9318e-05, "loss": 0.0073, "step": 12049 }, { "epoch": 22.23268698060942, "grad_norm": 0.2966844141483307, "learning_rate": 2.9317666666666665e-05, "loss": 0.0093, "step": 12050 }, { "epoch": 22.234533702677748, "grad_norm": 0.33475998044013977, "learning_rate": 2.9317333333333335e-05, "loss": 0.009, "step": 12051 }, { "epoch": 22.236380424746077, "grad_norm": 0.16185058653354645, "learning_rate": 2.9317e-05, "loss": 0.0049, "step": 12052 }, { "epoch": 22.238227146814406, "grad_norm": 0.25134894251823425, 
"learning_rate": 2.9316666666666667e-05, "loss": 0.0105, "step": 12053 }, { "epoch": 22.24007386888273, "grad_norm": 0.21591123938560486, "learning_rate": 2.9316333333333336e-05, "loss": 0.0085, "step": 12054 }, { "epoch": 22.24192059095106, "grad_norm": 0.36200016736984253, "learning_rate": 2.9316e-05, "loss": 0.007, "step": 12055 }, { "epoch": 22.24376731301939, "grad_norm": 0.20390160381793976, "learning_rate": 2.9315666666666668e-05, "loss": 0.0078, "step": 12056 }, { "epoch": 22.24561403508772, "grad_norm": 0.5636849999427795, "learning_rate": 2.9315333333333334e-05, "loss": 0.0074, "step": 12057 }, { "epoch": 22.24746075715605, "grad_norm": 0.3945927619934082, "learning_rate": 2.9315e-05, "loss": 0.0136, "step": 12058 }, { "epoch": 22.249307479224377, "grad_norm": 0.20777307450771332, "learning_rate": 2.931466666666667e-05, "loss": 0.0047, "step": 12059 }, { "epoch": 22.251154201292707, "grad_norm": 0.7784906029701233, "learning_rate": 2.9314333333333335e-05, "loss": 0.0208, "step": 12060 }, { "epoch": 22.253000923361036, "grad_norm": 0.16684590280056, "learning_rate": 2.9314e-05, "loss": 0.0034, "step": 12061 }, { "epoch": 22.25484764542936, "grad_norm": 0.2453305870294571, "learning_rate": 2.9313666666666667e-05, "loss": 0.0059, "step": 12062 }, { "epoch": 22.25669436749769, "grad_norm": 0.4265834093093872, "learning_rate": 2.9313333333333336e-05, "loss": 0.0111, "step": 12063 }, { "epoch": 22.25854108956602, "grad_norm": 0.25547710061073303, "learning_rate": 2.9313e-05, "loss": 0.0041, "step": 12064 }, { "epoch": 22.26038781163435, "grad_norm": 0.6966615319252014, "learning_rate": 2.9312666666666668e-05, "loss": 0.0074, "step": 12065 }, { "epoch": 22.262234533702678, "grad_norm": 0.23735538125038147, "learning_rate": 2.9312333333333334e-05, "loss": 0.0038, "step": 12066 }, { "epoch": 22.264081255771007, "grad_norm": 0.24145880341529846, "learning_rate": 2.9312e-05, "loss": 0.0062, "step": 12067 }, { "epoch": 22.265927977839336, "grad_norm": 
0.23477429151535034, "learning_rate": 2.931166666666667e-05, "loss": 0.0054, "step": 12068 }, { "epoch": 22.267774699907665, "grad_norm": 0.22543811798095703, "learning_rate": 2.9311333333333335e-05, "loss": 0.0115, "step": 12069 }, { "epoch": 22.26962142197599, "grad_norm": 0.21335336565971375, "learning_rate": 2.9311e-05, "loss": 0.0071, "step": 12070 }, { "epoch": 22.27146814404432, "grad_norm": 0.4495827853679657, "learning_rate": 2.9310666666666667e-05, "loss": 0.0107, "step": 12071 }, { "epoch": 22.27331486611265, "grad_norm": 0.5423740744590759, "learning_rate": 2.9310333333333336e-05, "loss": 0.0125, "step": 12072 }, { "epoch": 22.27516158818098, "grad_norm": 0.2808704376220703, "learning_rate": 2.931e-05, "loss": 0.0047, "step": 12073 }, { "epoch": 22.277008310249307, "grad_norm": 0.40028220415115356, "learning_rate": 2.9309666666666668e-05, "loss": 0.0144, "step": 12074 }, { "epoch": 22.278855032317637, "grad_norm": 0.6234591603279114, "learning_rate": 2.930933333333333e-05, "loss": 0.1581, "step": 12075 }, { "epoch": 22.280701754385966, "grad_norm": 0.5651040077209473, "learning_rate": 2.9309e-05, "loss": 0.1046, "step": 12076 }, { "epoch": 22.282548476454295, "grad_norm": 1.1081950664520264, "learning_rate": 2.930866666666667e-05, "loss": 0.1222, "step": 12077 }, { "epoch": 22.284395198522624, "grad_norm": 0.39549520611763, "learning_rate": 2.9308333333333332e-05, "loss": 0.0584, "step": 12078 }, { "epoch": 22.28624192059095, "grad_norm": 0.49096477031707764, "learning_rate": 2.9308e-05, "loss": 0.1001, "step": 12079 }, { "epoch": 22.28808864265928, "grad_norm": 0.3321293592453003, "learning_rate": 2.9307666666666667e-05, "loss": 0.0656, "step": 12080 }, { "epoch": 22.289935364727608, "grad_norm": 0.39119619131088257, "learning_rate": 2.9307333333333333e-05, "loss": 0.0507, "step": 12081 }, { "epoch": 22.291782086795937, "grad_norm": 0.6798700094223022, "learning_rate": 2.9307e-05, "loss": 0.052, "step": 12082 }, { "epoch": 22.293628808864266, 
"grad_norm": 0.4221579432487488, "learning_rate": 2.930666666666667e-05, "loss": 0.0662, "step": 12083 }, { "epoch": 22.295475530932595, "grad_norm": 0.33786314725875854, "learning_rate": 2.9306333333333334e-05, "loss": 0.0566, "step": 12084 }, { "epoch": 22.297322253000925, "grad_norm": 0.2803206741809845, "learning_rate": 2.9306e-05, "loss": 0.0293, "step": 12085 }, { "epoch": 22.299168975069254, "grad_norm": 0.5887284874916077, "learning_rate": 2.930566666666667e-05, "loss": 0.0489, "step": 12086 }, { "epoch": 22.30101569713758, "grad_norm": 0.4942704439163208, "learning_rate": 2.9305333333333332e-05, "loss": 0.0543, "step": 12087 }, { "epoch": 22.30286241920591, "grad_norm": 0.34390565752983093, "learning_rate": 2.9305e-05, "loss": 0.0362, "step": 12088 }, { "epoch": 22.304709141274238, "grad_norm": 0.3027403950691223, "learning_rate": 2.9304666666666667e-05, "loss": 0.0288, "step": 12089 }, { "epoch": 22.306555863342567, "grad_norm": 0.334240198135376, "learning_rate": 2.9304333333333333e-05, "loss": 0.013, "step": 12090 }, { "epoch": 22.308402585410896, "grad_norm": 0.2725203335285187, "learning_rate": 2.9304e-05, "loss": 0.0095, "step": 12091 }, { "epoch": 22.310249307479225, "grad_norm": 0.3006215989589691, "learning_rate": 2.930366666666667e-05, "loss": 0.0138, "step": 12092 }, { "epoch": 22.312096029547554, "grad_norm": 0.22081726789474487, "learning_rate": 2.9303333333333335e-05, "loss": 0.0087, "step": 12093 }, { "epoch": 22.313942751615883, "grad_norm": 0.2759893238544464, "learning_rate": 2.9303e-05, "loss": 0.0117, "step": 12094 }, { "epoch": 22.31578947368421, "grad_norm": 0.2866109311580658, "learning_rate": 2.930266666666667e-05, "loss": 0.0095, "step": 12095 }, { "epoch": 22.317636195752538, "grad_norm": 0.14103403687477112, "learning_rate": 2.9302333333333332e-05, "loss": 0.0044, "step": 12096 }, { "epoch": 22.319482917820867, "grad_norm": 0.2929133474826813, "learning_rate": 2.9302e-05, "loss": 0.0256, "step": 12097 }, { "epoch": 
22.321329639889196, "grad_norm": 0.1882898211479187, "learning_rate": 2.9301666666666668e-05, "loss": 0.0093, "step": 12098 }, { "epoch": 22.323176361957525, "grad_norm": 0.16777978837490082, "learning_rate": 2.9301333333333334e-05, "loss": 0.0093, "step": 12099 }, { "epoch": 22.325023084025855, "grad_norm": 0.1872076392173767, "learning_rate": 2.9301e-05, "loss": 0.0081, "step": 12100 }, { "epoch": 22.326869806094184, "grad_norm": 0.49100226163864136, "learning_rate": 2.930066666666667e-05, "loss": 0.0141, "step": 12101 }, { "epoch": 22.328716528162513, "grad_norm": 0.21988950669765472, "learning_rate": 2.9300333333333335e-05, "loss": 0.035, "step": 12102 }, { "epoch": 22.330563250230842, "grad_norm": 0.21322642266750336, "learning_rate": 2.93e-05, "loss": 0.0083, "step": 12103 }, { "epoch": 22.332409972299168, "grad_norm": 0.28099849820137024, "learning_rate": 2.929966666666667e-05, "loss": 0.0067, "step": 12104 }, { "epoch": 22.334256694367497, "grad_norm": 0.13967318832874298, "learning_rate": 2.9299333333333333e-05, "loss": 0.004, "step": 12105 }, { "epoch": 22.336103416435826, "grad_norm": 0.38747766613960266, "learning_rate": 2.9299000000000002e-05, "loss": 0.0175, "step": 12106 }, { "epoch": 22.337950138504155, "grad_norm": 0.1795417219400406, "learning_rate": 2.9298666666666664e-05, "loss": 0.0046, "step": 12107 }, { "epoch": 22.339796860572484, "grad_norm": 0.21303912997245789, "learning_rate": 2.9298333333333334e-05, "loss": 0.0058, "step": 12108 }, { "epoch": 22.341643582640813, "grad_norm": 0.1777993142604828, "learning_rate": 2.9298000000000003e-05, "loss": 0.0056, "step": 12109 }, { "epoch": 22.343490304709142, "grad_norm": 0.09992282837629318, "learning_rate": 2.9297666666666666e-05, "loss": 0.0026, "step": 12110 }, { "epoch": 22.34533702677747, "grad_norm": 0.34188637137413025, "learning_rate": 2.9297333333333335e-05, "loss": 0.0091, "step": 12111 }, { "epoch": 22.347183748845797, "grad_norm": 0.2958120107650757, "learning_rate": 2.9297e-05, 
"loss": 0.0076, "step": 12112 }, { "epoch": 22.349030470914126, "grad_norm": 0.27804356813430786, "learning_rate": 2.9296666666666667e-05, "loss": 0.0065, "step": 12113 }, { "epoch": 22.350877192982455, "grad_norm": 0.23710592091083527, "learning_rate": 2.9296333333333333e-05, "loss": 0.0056, "step": 12114 }, { "epoch": 22.352723915050785, "grad_norm": 0.17549476027488708, "learning_rate": 2.9296000000000002e-05, "loss": 0.0044, "step": 12115 }, { "epoch": 22.354570637119114, "grad_norm": 0.15604440867900848, "learning_rate": 2.9295666666666665e-05, "loss": 0.0046, "step": 12116 }, { "epoch": 22.356417359187443, "grad_norm": 0.3873245120048523, "learning_rate": 2.9295333333333334e-05, "loss": 0.009, "step": 12117 }, { "epoch": 22.358264081255772, "grad_norm": 0.22579556703567505, "learning_rate": 2.9295000000000003e-05, "loss": 0.0049, "step": 12118 }, { "epoch": 22.3601108033241, "grad_norm": 0.5002829432487488, "learning_rate": 2.9294666666666666e-05, "loss": 0.0226, "step": 12119 }, { "epoch": 22.361957525392427, "grad_norm": 0.5181460380554199, "learning_rate": 2.9294333333333335e-05, "loss": 0.0115, "step": 12120 }, { "epoch": 22.363804247460756, "grad_norm": 0.27780306339263916, "learning_rate": 2.9294e-05, "loss": 0.0215, "step": 12121 }, { "epoch": 22.365650969529085, "grad_norm": 0.4311021566390991, "learning_rate": 2.9293666666666667e-05, "loss": 0.006, "step": 12122 }, { "epoch": 22.367497691597414, "grad_norm": 0.30893686413764954, "learning_rate": 2.9293333333333333e-05, "loss": 0.0045, "step": 12123 }, { "epoch": 22.369344413665743, "grad_norm": 0.26298242807388306, "learning_rate": 2.9293000000000002e-05, "loss": 0.0069, "step": 12124 }, { "epoch": 22.371191135734072, "grad_norm": 0.6145325899124146, "learning_rate": 2.9292666666666665e-05, "loss": 0.146, "step": 12125 }, { "epoch": 22.3730378578024, "grad_norm": 0.551490068435669, "learning_rate": 2.9292333333333334e-05, "loss": 0.1247, "step": 12126 }, { "epoch": 22.37488457987073, "grad_norm": 
0.4913572072982788, "learning_rate": 2.9292000000000003e-05, "loss": 0.0839, "step": 12127 }, { "epoch": 22.37673130193906, "grad_norm": 0.4714609682559967, "learning_rate": 2.9291666666666666e-05, "loss": 0.0949, "step": 12128 }, { "epoch": 22.378578024007385, "grad_norm": 0.5117810964584351, "learning_rate": 2.9291333333333335e-05, "loss": 0.0748, "step": 12129 }, { "epoch": 22.380424746075715, "grad_norm": 0.5069825053215027, "learning_rate": 2.9291e-05, "loss": 0.0757, "step": 12130 }, { "epoch": 22.382271468144044, "grad_norm": 0.39257165789604187, "learning_rate": 2.9290666666666667e-05, "loss": 0.0428, "step": 12131 }, { "epoch": 22.384118190212373, "grad_norm": 0.3679068386554718, "learning_rate": 2.9290333333333333e-05, "loss": 0.0406, "step": 12132 }, { "epoch": 22.385964912280702, "grad_norm": 0.5544233322143555, "learning_rate": 2.9290000000000002e-05, "loss": 0.0572, "step": 12133 }, { "epoch": 22.38781163434903, "grad_norm": 0.37580254673957825, "learning_rate": 2.928966666666667e-05, "loss": 0.0317, "step": 12134 }, { "epoch": 22.38965835641736, "grad_norm": 0.3190949261188507, "learning_rate": 2.9289333333333334e-05, "loss": 0.0368, "step": 12135 }, { "epoch": 22.39150507848569, "grad_norm": 0.8851911425590515, "learning_rate": 2.9289e-05, "loss": 0.0398, "step": 12136 }, { "epoch": 22.393351800554015, "grad_norm": 0.27544206380844116, "learning_rate": 2.9288666666666666e-05, "loss": 0.0435, "step": 12137 }, { "epoch": 22.395198522622344, "grad_norm": 0.23330990970134735, "learning_rate": 2.9288333333333336e-05, "loss": 0.0127, "step": 12138 }, { "epoch": 22.397045244690673, "grad_norm": 0.3330238461494446, "learning_rate": 2.9287999999999998e-05, "loss": 0.0211, "step": 12139 }, { "epoch": 22.398891966759003, "grad_norm": 0.3399524390697479, "learning_rate": 2.9287666666666667e-05, "loss": 0.0272, "step": 12140 }, { "epoch": 22.40073868882733, "grad_norm": 0.23550830781459808, "learning_rate": 2.9287333333333333e-05, "loss": 0.009, "step": 12141 }, 
{ "epoch": 22.40258541089566, "grad_norm": 0.2656426429748535, "learning_rate": 2.9287e-05, "loss": 0.0082, "step": 12142 }, { "epoch": 22.40443213296399, "grad_norm": 0.23883987963199615, "learning_rate": 2.928666666666667e-05, "loss": 0.0256, "step": 12143 }, { "epoch": 22.40627885503232, "grad_norm": 0.29757219552993774, "learning_rate": 2.9286333333333335e-05, "loss": 0.0359, "step": 12144 }, { "epoch": 22.408125577100645, "grad_norm": 0.4060526192188263, "learning_rate": 2.9286e-05, "loss": 0.0257, "step": 12145 }, { "epoch": 22.409972299168974, "grad_norm": 0.7830715775489807, "learning_rate": 2.9285666666666666e-05, "loss": 0.0142, "step": 12146 }, { "epoch": 22.411819021237303, "grad_norm": 0.22146490216255188, "learning_rate": 2.9285333333333336e-05, "loss": 0.011, "step": 12147 }, { "epoch": 22.413665743305632, "grad_norm": 0.31865814328193665, "learning_rate": 2.9284999999999998e-05, "loss": 0.013, "step": 12148 }, { "epoch": 22.41551246537396, "grad_norm": 0.15901601314544678, "learning_rate": 2.9284666666666668e-05, "loss": 0.005, "step": 12149 }, { "epoch": 22.41735918744229, "grad_norm": 0.3411862850189209, "learning_rate": 2.9284333333333334e-05, "loss": 0.0087, "step": 12150 }, { "epoch": 22.41920590951062, "grad_norm": 0.40633055567741394, "learning_rate": 2.9284e-05, "loss": 0.0125, "step": 12151 }, { "epoch": 22.42105263157895, "grad_norm": 0.6341411471366882, "learning_rate": 2.928366666666667e-05, "loss": 0.0102, "step": 12152 }, { "epoch": 22.422899353647278, "grad_norm": 0.18682046234607697, "learning_rate": 2.9283333333333335e-05, "loss": 0.007, "step": 12153 }, { "epoch": 22.424746075715603, "grad_norm": 0.17826856672763824, "learning_rate": 2.9283e-05, "loss": 0.0052, "step": 12154 }, { "epoch": 22.426592797783933, "grad_norm": 0.22934889793395996, "learning_rate": 2.9282666666666667e-05, "loss": 0.0055, "step": 12155 }, { "epoch": 22.42843951985226, "grad_norm": 0.20013564825057983, "learning_rate": 2.9282333333333336e-05, "loss": 
0.0066, "step": 12156 }, { "epoch": 22.43028624192059, "grad_norm": 0.31481319665908813, "learning_rate": 2.9282e-05, "loss": 0.0065, "step": 12157 }, { "epoch": 22.43213296398892, "grad_norm": 0.24155759811401367, "learning_rate": 2.9281666666666668e-05, "loss": 0.0077, "step": 12158 }, { "epoch": 22.43397968605725, "grad_norm": 0.3853037655353546, "learning_rate": 2.9281333333333337e-05, "loss": 0.0069, "step": 12159 }, { "epoch": 22.43582640812558, "grad_norm": 0.42370277643203735, "learning_rate": 2.9281e-05, "loss": 0.009, "step": 12160 }, { "epoch": 22.437673130193907, "grad_norm": 0.27092936635017395, "learning_rate": 2.928066666666667e-05, "loss": 0.0079, "step": 12161 }, { "epoch": 22.439519852262233, "grad_norm": 0.27856534719467163, "learning_rate": 2.9280333333333335e-05, "loss": 0.0111, "step": 12162 }, { "epoch": 22.441366574330562, "grad_norm": 1.4294368028640747, "learning_rate": 2.928e-05, "loss": 0.0193, "step": 12163 }, { "epoch": 22.44321329639889, "grad_norm": 0.217570498585701, "learning_rate": 2.9279666666666667e-05, "loss": 0.0051, "step": 12164 }, { "epoch": 22.44506001846722, "grad_norm": 0.3201679587364197, "learning_rate": 2.9279333333333336e-05, "loss": 0.0093, "step": 12165 }, { "epoch": 22.44690674053555, "grad_norm": 0.0861976221203804, "learning_rate": 2.9279e-05, "loss": 0.0029, "step": 12166 }, { "epoch": 22.44875346260388, "grad_norm": 0.27611875534057617, "learning_rate": 2.9278666666666668e-05, "loss": 0.0094, "step": 12167 }, { "epoch": 22.450600184672208, "grad_norm": 0.5227705240249634, "learning_rate": 2.9278333333333334e-05, "loss": 0.0118, "step": 12168 }, { "epoch": 22.452446906740537, "grad_norm": 0.28735342621803284, "learning_rate": 2.9278e-05, "loss": 0.0103, "step": 12169 }, { "epoch": 22.454293628808863, "grad_norm": 0.2518790066242218, "learning_rate": 2.927766666666667e-05, "loss": 0.006, "step": 12170 }, { "epoch": 22.45614035087719, "grad_norm": 0.21540474891662598, "learning_rate": 2.9277333333333332e-05, 
"loss": 0.0067, "step": 12171 }, { "epoch": 22.45798707294552, "grad_norm": 0.17142251133918762, "learning_rate": 2.9277e-05, "loss": 0.005, "step": 12172 }, { "epoch": 22.45983379501385, "grad_norm": 0.544914960861206, "learning_rate": 2.9276666666666667e-05, "loss": 0.007, "step": 12173 }, { "epoch": 22.46168051708218, "grad_norm": 0.5627887845039368, "learning_rate": 2.9276333333333333e-05, "loss": 0.0282, "step": 12174 }, { "epoch": 22.46352723915051, "grad_norm": 0.5315102338790894, "learning_rate": 2.9276e-05, "loss": 0.1304, "step": 12175 }, { "epoch": 22.465373961218837, "grad_norm": 0.6638376712799072, "learning_rate": 2.9275666666666668e-05, "loss": 0.1575, "step": 12176 }, { "epoch": 22.467220683287167, "grad_norm": 0.543993353843689, "learning_rate": 2.9275333333333334e-05, "loss": 0.0801, "step": 12177 }, { "epoch": 22.469067405355496, "grad_norm": 0.48404455184936523, "learning_rate": 2.9275e-05, "loss": 0.1037, "step": 12178 }, { "epoch": 22.47091412742382, "grad_norm": 0.5442525744438171, "learning_rate": 2.927466666666667e-05, "loss": 0.1334, "step": 12179 }, { "epoch": 22.47276084949215, "grad_norm": 0.3651285767555237, "learning_rate": 2.9274333333333332e-05, "loss": 0.0792, "step": 12180 }, { "epoch": 22.47460757156048, "grad_norm": 0.6731127500534058, "learning_rate": 2.9274e-05, "loss": 0.0938, "step": 12181 }, { "epoch": 22.47645429362881, "grad_norm": 0.34531083703041077, "learning_rate": 2.9273666666666667e-05, "loss": 0.0419, "step": 12182 }, { "epoch": 22.478301015697138, "grad_norm": 0.4028952717781067, "learning_rate": 2.9273333333333333e-05, "loss": 0.0674, "step": 12183 }, { "epoch": 22.480147737765467, "grad_norm": 0.358442485332489, "learning_rate": 2.9273000000000002e-05, "loss": 0.0481, "step": 12184 }, { "epoch": 22.481994459833796, "grad_norm": 0.44311586022377014, "learning_rate": 2.927266666666667e-05, "loss": 0.0316, "step": 12185 }, { "epoch": 22.483841181902125, "grad_norm": 0.32679107785224915, "learning_rate": 
2.9272333333333334e-05, "loss": 0.0895, "step": 12186 }, { "epoch": 22.48568790397045, "grad_norm": 0.44924479722976685, "learning_rate": 2.9272e-05, "loss": 0.0197, "step": 12187 }, { "epoch": 22.48753462603878, "grad_norm": 0.22197957336902618, "learning_rate": 2.927166666666667e-05, "loss": 0.0312, "step": 12188 }, { "epoch": 22.48938134810711, "grad_norm": 0.22441096603870392, "learning_rate": 2.9271333333333332e-05, "loss": 0.0378, "step": 12189 }, { "epoch": 22.49122807017544, "grad_norm": 0.24108120799064636, "learning_rate": 2.9271e-05, "loss": 0.0108, "step": 12190 }, { "epoch": 22.493074792243767, "grad_norm": 0.22874818742275238, "learning_rate": 2.9270666666666667e-05, "loss": 0.017, "step": 12191 }, { "epoch": 22.494921514312097, "grad_norm": 0.3547055423259735, "learning_rate": 2.9270333333333333e-05, "loss": 0.0107, "step": 12192 }, { "epoch": 22.496768236380426, "grad_norm": 0.1598084270954132, "learning_rate": 2.9270000000000003e-05, "loss": 0.0065, "step": 12193 }, { "epoch": 22.498614958448755, "grad_norm": 0.1760522723197937, "learning_rate": 2.926966666666667e-05, "loss": 0.0057, "step": 12194 }, { "epoch": 22.50046168051708, "grad_norm": 0.1980983018875122, "learning_rate": 2.9269333333333335e-05, "loss": 0.0042, "step": 12195 }, { "epoch": 22.50230840258541, "grad_norm": 0.24898752570152283, "learning_rate": 2.9269e-05, "loss": 0.0054, "step": 12196 }, { "epoch": 22.50415512465374, "grad_norm": 0.42561447620391846, "learning_rate": 2.9268666666666666e-05, "loss": 0.0061, "step": 12197 }, { "epoch": 22.506001846722068, "grad_norm": 0.3103271424770355, "learning_rate": 2.9268333333333332e-05, "loss": 0.0088, "step": 12198 }, { "epoch": 22.507848568790397, "grad_norm": 0.2575102746486664, "learning_rate": 2.9268e-05, "loss": 0.008, "step": 12199 }, { "epoch": 22.509695290858726, "grad_norm": 0.14075084030628204, "learning_rate": 2.9267666666666664e-05, "loss": 0.0051, "step": 12200 }, { "epoch": 22.511542012927055, "grad_norm": 
0.38890862464904785, "learning_rate": 2.9267333333333334e-05, "loss": 0.0121, "step": 12201 }, { "epoch": 22.513388734995385, "grad_norm": 0.15616172552108765, "learning_rate": 2.9267000000000003e-05, "loss": 0.0037, "step": 12202 }, { "epoch": 22.51523545706371, "grad_norm": 0.5177579522132874, "learning_rate": 2.9266666666666665e-05, "loss": 0.0101, "step": 12203 }, { "epoch": 22.51708217913204, "grad_norm": 0.20640073716640472, "learning_rate": 2.9266333333333335e-05, "loss": 0.0072, "step": 12204 }, { "epoch": 22.51892890120037, "grad_norm": 0.2602652907371521, "learning_rate": 2.9266e-05, "loss": 0.01, "step": 12205 }, { "epoch": 22.520775623268698, "grad_norm": 0.29666656255722046, "learning_rate": 2.9265666666666667e-05, "loss": 0.0082, "step": 12206 }, { "epoch": 22.522622345337027, "grad_norm": 0.33792397379875183, "learning_rate": 2.9265333333333333e-05, "loss": 0.0111, "step": 12207 }, { "epoch": 22.524469067405356, "grad_norm": 0.22936464846134186, "learning_rate": 2.9265000000000002e-05, "loss": 0.0093, "step": 12208 }, { "epoch": 22.526315789473685, "grad_norm": 0.270159512758255, "learning_rate": 2.9264666666666668e-05, "loss": 0.0086, "step": 12209 }, { "epoch": 22.528162511542014, "grad_norm": 0.14591240882873535, "learning_rate": 2.9264333333333334e-05, "loss": 0.0035, "step": 12210 }, { "epoch": 22.530009233610343, "grad_norm": 0.15915511548519135, "learning_rate": 2.9264000000000003e-05, "loss": 0.0059, "step": 12211 }, { "epoch": 22.53185595567867, "grad_norm": 0.20213396847248077, "learning_rate": 2.9263666666666666e-05, "loss": 0.0053, "step": 12212 }, { "epoch": 22.533702677746998, "grad_norm": 0.26984938979148865, "learning_rate": 2.9263333333333335e-05, "loss": 0.008, "step": 12213 }, { "epoch": 22.535549399815327, "grad_norm": 0.3322078287601471, "learning_rate": 2.9263e-05, "loss": 0.009, "step": 12214 }, { "epoch": 22.537396121883656, "grad_norm": 0.7859511375427246, "learning_rate": 2.9262666666666667e-05, "loss": 0.0101, "step": 12215 
}, { "epoch": 22.539242843951985, "grad_norm": 0.18283531069755554, "learning_rate": 2.9262333333333333e-05, "loss": 0.0044, "step": 12216 }, { "epoch": 22.541089566020315, "grad_norm": 0.3152421712875366, "learning_rate": 2.9262000000000002e-05, "loss": 0.0115, "step": 12217 }, { "epoch": 22.542936288088644, "grad_norm": 0.24157525599002838, "learning_rate": 2.9261666666666668e-05, "loss": 0.0057, "step": 12218 }, { "epoch": 22.544783010156973, "grad_norm": 0.26296094059944153, "learning_rate": 2.9261333333333334e-05, "loss": 0.0093, "step": 12219 }, { "epoch": 22.5466297322253, "grad_norm": 0.4341523051261902, "learning_rate": 2.9261000000000003e-05, "loss": 0.0098, "step": 12220 }, { "epoch": 22.548476454293628, "grad_norm": 0.2000827044248581, "learning_rate": 2.9260666666666666e-05, "loss": 0.0048, "step": 12221 }, { "epoch": 22.550323176361957, "grad_norm": 0.4425687789916992, "learning_rate": 2.9260333333333335e-05, "loss": 0.0083, "step": 12222 }, { "epoch": 22.552169898430286, "grad_norm": 0.3309001922607422, "learning_rate": 2.926e-05, "loss": 0.0098, "step": 12223 }, { "epoch": 22.554016620498615, "grad_norm": 0.18561774492263794, "learning_rate": 2.9259666666666667e-05, "loss": 0.0151, "step": 12224 }, { "epoch": 22.555863342566944, "grad_norm": 0.5745847225189209, "learning_rate": 2.9259333333333333e-05, "loss": 0.1422, "step": 12225 }, { "epoch": 22.557710064635273, "grad_norm": 0.4258785545825958, "learning_rate": 2.9259e-05, "loss": 0.1035, "step": 12226 }, { "epoch": 22.559556786703602, "grad_norm": 0.554884672164917, "learning_rate": 2.9258666666666668e-05, "loss": 0.093, "step": 12227 }, { "epoch": 22.56140350877193, "grad_norm": 0.35626792907714844, "learning_rate": 2.9258333333333334e-05, "loss": 0.0755, "step": 12228 }, { "epoch": 22.563250230840257, "grad_norm": 0.5393857955932617, "learning_rate": 2.9258e-05, "loss": 0.0659, "step": 12229 }, { "epoch": 22.565096952908586, "grad_norm": 0.43814486265182495, "learning_rate": 
2.9257666666666666e-05, "loss": 0.0474, "step": 12230 }, { "epoch": 22.566943674976915, "grad_norm": 0.6040318012237549, "learning_rate": 2.9257333333333335e-05, "loss": 0.0505, "step": 12231 }, { "epoch": 22.568790397045245, "grad_norm": 0.4554961919784546, "learning_rate": 2.9256999999999998e-05, "loss": 0.064, "step": 12232 }, { "epoch": 22.570637119113574, "grad_norm": 0.36496084928512573, "learning_rate": 2.9256666666666667e-05, "loss": 0.0463, "step": 12233 }, { "epoch": 22.572483841181903, "grad_norm": 0.8404495716094971, "learning_rate": 2.9256333333333337e-05, "loss": 0.038, "step": 12234 }, { "epoch": 22.574330563250232, "grad_norm": 0.37269464135169983, "learning_rate": 2.9256e-05, "loss": 0.0317, "step": 12235 }, { "epoch": 22.57617728531856, "grad_norm": 0.659870445728302, "learning_rate": 2.925566666666667e-05, "loss": 0.0335, "step": 12236 }, { "epoch": 22.578024007386887, "grad_norm": 0.22030824422836304, "learning_rate": 2.9255333333333334e-05, "loss": 0.0308, "step": 12237 }, { "epoch": 22.579870729455216, "grad_norm": 0.3626607358455658, "learning_rate": 2.9255e-05, "loss": 0.0477, "step": 12238 }, { "epoch": 22.581717451523545, "grad_norm": 0.2966305911540985, "learning_rate": 2.9254666666666666e-05, "loss": 0.0113, "step": 12239 }, { "epoch": 22.583564173591874, "grad_norm": 0.3302536904811859, "learning_rate": 2.9254333333333336e-05, "loss": 0.0311, "step": 12240 }, { "epoch": 22.585410895660203, "grad_norm": 0.1487927883863449, "learning_rate": 2.9253999999999998e-05, "loss": 0.0076, "step": 12241 }, { "epoch": 22.587257617728532, "grad_norm": 0.22927799820899963, "learning_rate": 2.9253666666666667e-05, "loss": 0.0106, "step": 12242 }, { "epoch": 22.58910433979686, "grad_norm": 0.1772453337907791, "learning_rate": 2.9253333333333337e-05, "loss": 0.0095, "step": 12243 }, { "epoch": 22.59095106186519, "grad_norm": 0.6161147356033325, "learning_rate": 2.9253e-05, "loss": 0.0099, "step": 12244 }, { "epoch": 22.592797783933516, "grad_norm": 
0.16344311833381653, "learning_rate": 2.925266666666667e-05, "loss": 0.0298, "step": 12245 }, { "epoch": 22.594644506001845, "grad_norm": 0.299240380525589, "learning_rate": 2.9252333333333335e-05, "loss": 0.0086, "step": 12246 }, { "epoch": 22.596491228070175, "grad_norm": 0.23759780824184418, "learning_rate": 2.9252e-05, "loss": 0.0075, "step": 12247 }, { "epoch": 22.598337950138504, "grad_norm": 0.14949437975883484, "learning_rate": 2.9251666666666666e-05, "loss": 0.0048, "step": 12248 }, { "epoch": 22.600184672206833, "grad_norm": 0.2368757277727127, "learning_rate": 2.9251333333333336e-05, "loss": 0.0061, "step": 12249 }, { "epoch": 22.602031394275162, "grad_norm": 0.2707505226135254, "learning_rate": 2.9250999999999998e-05, "loss": 0.0055, "step": 12250 }, { "epoch": 22.60387811634349, "grad_norm": 0.34671154618263245, "learning_rate": 2.9250666666666668e-05, "loss": 0.023, "step": 12251 }, { "epoch": 22.60572483841182, "grad_norm": 0.14770708978176117, "learning_rate": 2.9250333333333337e-05, "loss": 0.0025, "step": 12252 }, { "epoch": 22.607571560480146, "grad_norm": 0.30914875864982605, "learning_rate": 2.925e-05, "loss": 0.0089, "step": 12253 }, { "epoch": 22.609418282548475, "grad_norm": 0.3850860893726349, "learning_rate": 2.924966666666667e-05, "loss": 0.0096, "step": 12254 }, { "epoch": 22.611265004616804, "grad_norm": 0.7597085237503052, "learning_rate": 2.9249333333333335e-05, "loss": 0.0407, "step": 12255 }, { "epoch": 22.613111726685133, "grad_norm": 0.13821229338645935, "learning_rate": 2.9249e-05, "loss": 0.0022, "step": 12256 }, { "epoch": 22.614958448753463, "grad_norm": 0.2871163487434387, "learning_rate": 2.9248666666666667e-05, "loss": 0.0092, "step": 12257 }, { "epoch": 22.61680517082179, "grad_norm": 0.17912018299102783, "learning_rate": 2.9248333333333333e-05, "loss": 0.0068, "step": 12258 }, { "epoch": 22.61865189289012, "grad_norm": 0.21821361780166626, "learning_rate": 2.9248000000000002e-05, "loss": 0.0055, "step": 12259 }, { 
"epoch": 22.62049861495845, "grad_norm": 0.09547823667526245, "learning_rate": 2.9247666666666668e-05, "loss": 0.0034, "step": 12260 }, { "epoch": 22.62234533702678, "grad_norm": 0.2564859092235565, "learning_rate": 2.9247333333333334e-05, "loss": 0.0101, "step": 12261 }, { "epoch": 22.624192059095105, "grad_norm": 0.37441202998161316, "learning_rate": 2.9247e-05, "loss": 0.0173, "step": 12262 }, { "epoch": 22.626038781163434, "grad_norm": 0.3169558644294739, "learning_rate": 2.924666666666667e-05, "loss": 0.0136, "step": 12263 }, { "epoch": 22.627885503231763, "grad_norm": 0.4918834865093231, "learning_rate": 2.924633333333333e-05, "loss": 0.0113, "step": 12264 }, { "epoch": 22.629732225300092, "grad_norm": 0.4788316786289215, "learning_rate": 2.9246e-05, "loss": 0.0171, "step": 12265 }, { "epoch": 22.63157894736842, "grad_norm": 0.18662941455841064, "learning_rate": 2.9245666666666667e-05, "loss": 0.0084, "step": 12266 }, { "epoch": 22.63342566943675, "grad_norm": 0.2884886860847473, "learning_rate": 2.9245333333333333e-05, "loss": 0.0075, "step": 12267 }, { "epoch": 22.63527239150508, "grad_norm": 0.32492247223854065, "learning_rate": 2.9245000000000002e-05, "loss": 0.0111, "step": 12268 }, { "epoch": 22.63711911357341, "grad_norm": 0.16241343319416046, "learning_rate": 2.9244666666666668e-05, "loss": 0.0052, "step": 12269 }, { "epoch": 22.638965835641734, "grad_norm": 0.8470730781555176, "learning_rate": 2.9244333333333334e-05, "loss": 0.0159, "step": 12270 }, { "epoch": 22.640812557710063, "grad_norm": 0.6832287311553955, "learning_rate": 2.9244e-05, "loss": 0.0084, "step": 12271 }, { "epoch": 22.642659279778393, "grad_norm": 0.7469265460968018, "learning_rate": 2.924366666666667e-05, "loss": 0.0117, "step": 12272 }, { "epoch": 22.64450600184672, "grad_norm": 0.5027961134910583, "learning_rate": 2.9243333333333332e-05, "loss": 0.013, "step": 12273 }, { "epoch": 22.64635272391505, "grad_norm": 0.536918580532074, "learning_rate": 2.9243e-05, "loss": 0.0101, 
"step": 12274 }, { "epoch": 22.64819944598338, "grad_norm": 0.6451272368431091, "learning_rate": 2.9242666666666667e-05, "loss": 0.111, "step": 12275 }, { "epoch": 22.65004616805171, "grad_norm": 0.576657235622406, "learning_rate": 2.9242333333333333e-05, "loss": 0.1548, "step": 12276 }, { "epoch": 22.65189289012004, "grad_norm": 0.4048967957496643, "learning_rate": 2.9242000000000002e-05, "loss": 0.0777, "step": 12277 }, { "epoch": 22.653739612188367, "grad_norm": 0.6146196722984314, "learning_rate": 2.9241666666666668e-05, "loss": 0.0728, "step": 12278 }, { "epoch": 22.655586334256693, "grad_norm": 0.541233479976654, "learning_rate": 2.9241333333333334e-05, "loss": 0.0761, "step": 12279 }, { "epoch": 22.657433056325022, "grad_norm": 0.49906066060066223, "learning_rate": 2.9241e-05, "loss": 0.0583, "step": 12280 }, { "epoch": 22.65927977839335, "grad_norm": 0.3858031928539276, "learning_rate": 2.924066666666667e-05, "loss": 0.0799, "step": 12281 }, { "epoch": 22.66112650046168, "grad_norm": 0.34499648213386536, "learning_rate": 2.9240333333333332e-05, "loss": 0.0415, "step": 12282 }, { "epoch": 22.66297322253001, "grad_norm": 0.5997113585472107, "learning_rate": 2.924e-05, "loss": 0.0449, "step": 12283 }, { "epoch": 22.66481994459834, "grad_norm": 0.35634171962738037, "learning_rate": 2.923966666666667e-05, "loss": 0.0362, "step": 12284 }, { "epoch": 22.666666666666668, "grad_norm": 0.37014394998550415, "learning_rate": 2.9239333333333333e-05, "loss": 0.0327, "step": 12285 }, { "epoch": 22.668513388734997, "grad_norm": 0.49967631697654724, "learning_rate": 2.9239000000000002e-05, "loss": 0.0628, "step": 12286 }, { "epoch": 22.670360110803323, "grad_norm": 0.2549503743648529, "learning_rate": 2.9238666666666665e-05, "loss": 0.0292, "step": 12287 }, { "epoch": 22.67220683287165, "grad_norm": 0.5967966318130493, "learning_rate": 2.9238333333333334e-05, "loss": 0.015, "step": 12288 }, { "epoch": 22.67405355493998, "grad_norm": 0.36082586646080017, "learning_rate": 
2.9238e-05, "loss": 0.0207, "step": 12289 }, { "epoch": 22.67590027700831, "grad_norm": 0.5506548285484314, "learning_rate": 2.9237666666666666e-05, "loss": 0.0234, "step": 12290 }, { "epoch": 22.67774699907664, "grad_norm": 0.3026949465274811, "learning_rate": 2.9237333333333332e-05, "loss": 0.0488, "step": 12291 }, { "epoch": 22.67959372114497, "grad_norm": 0.1955622285604477, "learning_rate": 2.9237e-05, "loss": 0.0094, "step": 12292 }, { "epoch": 22.681440443213297, "grad_norm": 0.1570124328136444, "learning_rate": 2.9236666666666667e-05, "loss": 0.0082, "step": 12293 }, { "epoch": 22.683287165281627, "grad_norm": 0.2552327811717987, "learning_rate": 2.9236333333333333e-05, "loss": 0.0144, "step": 12294 }, { "epoch": 22.685133887349952, "grad_norm": 0.16709206998348236, "learning_rate": 2.9236000000000003e-05, "loss": 0.0078, "step": 12295 }, { "epoch": 22.68698060941828, "grad_norm": 0.3998396694660187, "learning_rate": 2.9235666666666665e-05, "loss": 0.0126, "step": 12296 }, { "epoch": 22.68882733148661, "grad_norm": 0.30569109320640564, "learning_rate": 2.9235333333333335e-05, "loss": 0.0095, "step": 12297 }, { "epoch": 22.69067405355494, "grad_norm": 0.14271549880504608, "learning_rate": 2.9235e-05, "loss": 0.0078, "step": 12298 }, { "epoch": 22.69252077562327, "grad_norm": 0.2638216018676758, "learning_rate": 2.9234666666666666e-05, "loss": 0.0067, "step": 12299 }, { "epoch": 22.694367497691598, "grad_norm": 0.2199135720729828, "learning_rate": 2.9234333333333332e-05, "loss": 0.0054, "step": 12300 }, { "epoch": 22.696214219759927, "grad_norm": 0.22184860706329346, "learning_rate": 2.9234e-05, "loss": 0.0059, "step": 12301 }, { "epoch": 22.698060941828256, "grad_norm": 0.2710758447647095, "learning_rate": 2.9233666666666668e-05, "loss": 0.0062, "step": 12302 }, { "epoch": 22.69990766389658, "grad_norm": 0.38386720418930054, "learning_rate": 2.9233333333333334e-05, "loss": 0.0252, "step": 12303 }, { "epoch": 22.70175438596491, "grad_norm": 
0.23074857890605927, "learning_rate": 2.9233000000000003e-05, "loss": 0.0105, "step": 12304 }, { "epoch": 22.70360110803324, "grad_norm": 0.12127603590488434, "learning_rate": 2.9232666666666665e-05, "loss": 0.0064, "step": 12305 }, { "epoch": 22.70544783010157, "grad_norm": 0.29814377427101135, "learning_rate": 2.9232333333333335e-05, "loss": 0.0078, "step": 12306 }, { "epoch": 22.7072945521699, "grad_norm": 0.18820050358772278, "learning_rate": 2.9232e-05, "loss": 0.0058, "step": 12307 }, { "epoch": 22.709141274238227, "grad_norm": 0.3828445374965668, "learning_rate": 2.9231666666666667e-05, "loss": 0.0119, "step": 12308 }, { "epoch": 22.710987996306557, "grad_norm": 0.3817533850669861, "learning_rate": 2.9231333333333336e-05, "loss": 0.0095, "step": 12309 }, { "epoch": 22.712834718374886, "grad_norm": 0.270891398191452, "learning_rate": 2.9231000000000002e-05, "loss": 0.0106, "step": 12310 }, { "epoch": 22.714681440443215, "grad_norm": 0.09616384655237198, "learning_rate": 2.9230666666666668e-05, "loss": 0.0034, "step": 12311 }, { "epoch": 22.71652816251154, "grad_norm": 0.16380581259727478, "learning_rate": 2.9230333333333334e-05, "loss": 0.0027, "step": 12312 }, { "epoch": 22.71837488457987, "grad_norm": 0.35364049673080444, "learning_rate": 2.9230000000000003e-05, "loss": 0.0121, "step": 12313 }, { "epoch": 22.7202216066482, "grad_norm": 0.4222791790962219, "learning_rate": 2.9229666666666666e-05, "loss": 0.0134, "step": 12314 }, { "epoch": 22.722068328716528, "grad_norm": 0.3655267655849457, "learning_rate": 2.9229333333333335e-05, "loss": 0.0047, "step": 12315 }, { "epoch": 22.723915050784857, "grad_norm": 0.1754109412431717, "learning_rate": 2.9229e-05, "loss": 0.0056, "step": 12316 }, { "epoch": 22.725761772853186, "grad_norm": 0.21043196320533752, "learning_rate": 2.9228666666666667e-05, "loss": 0.0084, "step": 12317 }, { "epoch": 22.727608494921515, "grad_norm": 0.26307398080825806, "learning_rate": 2.9228333333333336e-05, "loss": 0.008, "step": 12318 
}, { "epoch": 22.729455216989845, "grad_norm": 0.43681734800338745, "learning_rate": 2.9228e-05, "loss": 0.0064, "step": 12319 }, { "epoch": 22.73130193905817, "grad_norm": 0.267784059047699, "learning_rate": 2.9227666666666668e-05, "loss": 0.0069, "step": 12320 }, { "epoch": 22.7331486611265, "grad_norm": 0.22806259989738464, "learning_rate": 2.9227333333333334e-05, "loss": 0.0081, "step": 12321 }, { "epoch": 22.73499538319483, "grad_norm": 0.16762927174568176, "learning_rate": 2.9227e-05, "loss": 0.0072, "step": 12322 }, { "epoch": 22.736842105263158, "grad_norm": 0.41946953535079956, "learning_rate": 2.9226666666666666e-05, "loss": 0.0075, "step": 12323 }, { "epoch": 22.738688827331487, "grad_norm": 0.3906378448009491, "learning_rate": 2.9226333333333335e-05, "loss": 0.0139, "step": 12324 }, { "epoch": 22.740535549399816, "grad_norm": 0.5999815464019775, "learning_rate": 2.9226e-05, "loss": 0.1396, "step": 12325 }, { "epoch": 22.742382271468145, "grad_norm": 0.3809105157852173, "learning_rate": 2.9225666666666667e-05, "loss": 0.0924, "step": 12326 }, { "epoch": 22.744228993536474, "grad_norm": 0.3599500060081482, "learning_rate": 2.9225333333333336e-05, "loss": 0.053, "step": 12327 }, { "epoch": 22.746075715604803, "grad_norm": 0.529497504234314, "learning_rate": 2.9225e-05, "loss": 0.0831, "step": 12328 }, { "epoch": 22.74792243767313, "grad_norm": 0.5233191847801208, "learning_rate": 2.9224666666666668e-05, "loss": 0.0952, "step": 12329 }, { "epoch": 22.749769159741458, "grad_norm": 0.5627424716949463, "learning_rate": 2.9224333333333334e-05, "loss": 0.1162, "step": 12330 }, { "epoch": 22.751615881809787, "grad_norm": 0.5519795417785645, "learning_rate": 2.9224e-05, "loss": 0.0477, "step": 12331 }, { "epoch": 22.753462603878116, "grad_norm": 0.6382800936698914, "learning_rate": 2.9223666666666666e-05, "loss": 0.0557, "step": 12332 }, { "epoch": 22.755309325946445, "grad_norm": 0.3135432004928589, "learning_rate": 2.9223333333333335e-05, "loss": 0.037, "step": 
12333 }, { "epoch": 22.757156048014775, "grad_norm": 0.26903706789016724, "learning_rate": 2.9223e-05, "loss": 0.0289, "step": 12334 }, { "epoch": 22.759002770083104, "grad_norm": 0.3219813108444214, "learning_rate": 2.9222666666666667e-05, "loss": 0.0197, "step": 12335 }, { "epoch": 22.760849492151433, "grad_norm": 0.611541211605072, "learning_rate": 2.9222333333333337e-05, "loss": 0.0809, "step": 12336 }, { "epoch": 22.76269621421976, "grad_norm": 0.5747827291488647, "learning_rate": 2.9222e-05, "loss": 0.0344, "step": 12337 }, { "epoch": 22.764542936288088, "grad_norm": 0.2298043668270111, "learning_rate": 2.922166666666667e-05, "loss": 0.022, "step": 12338 }, { "epoch": 22.766389658356417, "grad_norm": 0.3402498960494995, "learning_rate": 2.9221333333333334e-05, "loss": 0.0213, "step": 12339 }, { "epoch": 22.768236380424746, "grad_norm": 0.2586126923561096, "learning_rate": 2.9221e-05, "loss": 0.0232, "step": 12340 }, { "epoch": 22.770083102493075, "grad_norm": 0.15024542808532715, "learning_rate": 2.9220666666666666e-05, "loss": 0.0079, "step": 12341 }, { "epoch": 22.771929824561404, "grad_norm": 0.3457852900028229, "learning_rate": 2.9220333333333336e-05, "loss": 0.0245, "step": 12342 }, { "epoch": 22.773776546629733, "grad_norm": 0.1955130249261856, "learning_rate": 2.922e-05, "loss": 0.0227, "step": 12343 }, { "epoch": 22.775623268698062, "grad_norm": 0.16288869082927704, "learning_rate": 2.9219666666666667e-05, "loss": 0.0053, "step": 12344 }, { "epoch": 22.777469990766388, "grad_norm": 0.17608100175857544, "learning_rate": 2.9219333333333337e-05, "loss": 0.0078, "step": 12345 }, { "epoch": 22.779316712834717, "grad_norm": 0.3658122420310974, "learning_rate": 2.9219e-05, "loss": 0.013, "step": 12346 }, { "epoch": 22.781163434903046, "grad_norm": 0.130655437707901, "learning_rate": 2.921866666666667e-05, "loss": 0.004, "step": 12347 }, { "epoch": 22.783010156971375, "grad_norm": 0.4952363967895508, "learning_rate": 2.921833333333333e-05, "loss": 0.0066, 
"step": 12348 }, { "epoch": 22.784856879039705, "grad_norm": 0.278836727142334, "learning_rate": 2.9218e-05, "loss": 0.009, "step": 12349 }, { "epoch": 22.786703601108034, "grad_norm": 0.13057789206504822, "learning_rate": 2.9217666666666666e-05, "loss": 0.0044, "step": 12350 }, { "epoch": 22.788550323176363, "grad_norm": 0.15483197569847107, "learning_rate": 2.9217333333333332e-05, "loss": 0.006, "step": 12351 }, { "epoch": 22.790397045244692, "grad_norm": 0.6134659051895142, "learning_rate": 2.9217e-05, "loss": 0.0126, "step": 12352 }, { "epoch": 22.792243767313018, "grad_norm": 0.2991887927055359, "learning_rate": 2.9216666666666668e-05, "loss": 0.0077, "step": 12353 }, { "epoch": 22.794090489381347, "grad_norm": 0.1424655318260193, "learning_rate": 2.9216333333333334e-05, "loss": 0.003, "step": 12354 }, { "epoch": 22.795937211449676, "grad_norm": 1.4153794050216675, "learning_rate": 2.9216e-05, "loss": 0.0358, "step": 12355 }, { "epoch": 22.797783933518005, "grad_norm": 0.13688984513282776, "learning_rate": 2.921566666666667e-05, "loss": 0.004, "step": 12356 }, { "epoch": 22.799630655586334, "grad_norm": 0.38424474000930786, "learning_rate": 2.921533333333333e-05, "loss": 0.0066, "step": 12357 }, { "epoch": 22.801477377654663, "grad_norm": 0.19820640981197357, "learning_rate": 2.9215e-05, "loss": 0.0051, "step": 12358 }, { "epoch": 22.803324099722992, "grad_norm": 0.20740287005901337, "learning_rate": 2.921466666666667e-05, "loss": 0.007, "step": 12359 }, { "epoch": 22.80517082179132, "grad_norm": 0.14528685808181763, "learning_rate": 2.9214333333333333e-05, "loss": 0.0036, "step": 12360 }, { "epoch": 22.80701754385965, "grad_norm": 0.1387391835451126, "learning_rate": 2.9214000000000002e-05, "loss": 0.0283, "step": 12361 }, { "epoch": 22.808864265927976, "grad_norm": 0.3198137581348419, "learning_rate": 2.9213666666666668e-05, "loss": 0.0107, "step": 12362 }, { "epoch": 22.810710987996305, "grad_norm": 0.6842020750045776, "learning_rate": 
2.9213333333333334e-05, "loss": 0.0076, "step": 12363 }, { "epoch": 22.812557710064635, "grad_norm": 0.3192748725414276, "learning_rate": 2.9213e-05, "loss": 0.0049, "step": 12364 }, { "epoch": 22.814404432132964, "grad_norm": 0.15512192249298096, "learning_rate": 2.921266666666667e-05, "loss": 0.0048, "step": 12365 }, { "epoch": 22.816251154201293, "grad_norm": 0.15249937772750854, "learning_rate": 2.921233333333333e-05, "loss": 0.0041, "step": 12366 }, { "epoch": 22.818097876269622, "grad_norm": 0.44840556383132935, "learning_rate": 2.9212e-05, "loss": 0.0116, "step": 12367 }, { "epoch": 22.81994459833795, "grad_norm": 0.5910830497741699, "learning_rate": 2.921166666666667e-05, "loss": 0.0089, "step": 12368 }, { "epoch": 22.82179132040628, "grad_norm": 0.25855201482772827, "learning_rate": 2.9211333333333333e-05, "loss": 0.0104, "step": 12369 }, { "epoch": 22.823638042474606, "grad_norm": 0.25850245356559753, "learning_rate": 2.9211000000000002e-05, "loss": 0.0112, "step": 12370 }, { "epoch": 22.825484764542935, "grad_norm": 0.40904897451400757, "learning_rate": 2.9210666666666668e-05, "loss": 0.0103, "step": 12371 }, { "epoch": 22.827331486611264, "grad_norm": 0.4205116927623749, "learning_rate": 2.9210333333333334e-05, "loss": 0.0101, "step": 12372 }, { "epoch": 22.829178208679593, "grad_norm": 0.2385093718767166, "learning_rate": 2.921e-05, "loss": 0.0056, "step": 12373 }, { "epoch": 22.831024930747922, "grad_norm": 0.3131072223186493, "learning_rate": 2.920966666666667e-05, "loss": 0.0096, "step": 12374 }, { "epoch": 22.83287165281625, "grad_norm": 0.4860314130783081, "learning_rate": 2.9209333333333335e-05, "loss": 0.0993, "step": 12375 }, { "epoch": 22.83471837488458, "grad_norm": 0.5568736791610718, "learning_rate": 2.9209e-05, "loss": 0.1497, "step": 12376 }, { "epoch": 22.83656509695291, "grad_norm": 0.649315595626831, "learning_rate": 2.9208666666666667e-05, "loss": 0.1676, "step": 12377 }, { "epoch": 22.83841181902124, "grad_norm": 0.46940651535987854, 
"learning_rate": 2.9208333333333333e-05, "loss": 0.0722, "step": 12378 }, { "epoch": 22.840258541089565, "grad_norm": 0.43727776408195496, "learning_rate": 2.9208000000000002e-05, "loss": 0.0828, "step": 12379 }, { "epoch": 22.842105263157894, "grad_norm": 0.6778110265731812, "learning_rate": 2.9207666666666665e-05, "loss": 0.05, "step": 12380 }, { "epoch": 22.843951985226223, "grad_norm": 0.3043400049209595, "learning_rate": 2.9207333333333334e-05, "loss": 0.0321, "step": 12381 }, { "epoch": 22.845798707294552, "grad_norm": 0.590112566947937, "learning_rate": 2.9207e-05, "loss": 0.0585, "step": 12382 }, { "epoch": 22.84764542936288, "grad_norm": 0.3127261996269226, "learning_rate": 2.9206666666666666e-05, "loss": 0.0292, "step": 12383 }, { "epoch": 22.84949215143121, "grad_norm": 0.3524312973022461, "learning_rate": 2.9206333333333335e-05, "loss": 0.0343, "step": 12384 }, { "epoch": 22.85133887349954, "grad_norm": 0.3275330662727356, "learning_rate": 2.9206e-05, "loss": 0.021, "step": 12385 }, { "epoch": 22.85318559556787, "grad_norm": 0.2704507112503052, "learning_rate": 2.9205666666666667e-05, "loss": 0.0518, "step": 12386 }, { "epoch": 22.855032317636194, "grad_norm": 0.3582041263580322, "learning_rate": 2.9205333333333333e-05, "loss": 0.0167, "step": 12387 }, { "epoch": 22.856879039704523, "grad_norm": 0.334196537733078, "learning_rate": 2.9205000000000002e-05, "loss": 0.0154, "step": 12388 }, { "epoch": 22.858725761772853, "grad_norm": 0.23437385261058807, "learning_rate": 2.9204666666666665e-05, "loss": 0.0097, "step": 12389 }, { "epoch": 22.86057248384118, "grad_norm": 0.5138218402862549, "learning_rate": 2.9204333333333334e-05, "loss": 0.0144, "step": 12390 }, { "epoch": 22.86241920590951, "grad_norm": 0.422588586807251, "learning_rate": 2.9204e-05, "loss": 0.0121, "step": 12391 }, { "epoch": 22.86426592797784, "grad_norm": 0.2288866490125656, "learning_rate": 2.9203666666666666e-05, "loss": 0.0129, "step": 12392 }, { "epoch": 22.86611265004617, 
"grad_norm": 0.1804754137992859, "learning_rate": 2.9203333333333336e-05, "loss": 0.0082, "step": 12393 }, { "epoch": 22.8679593721145, "grad_norm": 0.2894505262374878, "learning_rate": 2.9203e-05, "loss": 0.01, "step": 12394 }, { "epoch": 22.869806094182824, "grad_norm": 0.2467958778142929, "learning_rate": 2.9202666666666667e-05, "loss": 0.0109, "step": 12395 }, { "epoch": 22.871652816251153, "grad_norm": 0.2496487945318222, "learning_rate": 2.9202333333333333e-05, "loss": 0.0096, "step": 12396 }, { "epoch": 22.873499538319482, "grad_norm": 0.22739478945732117, "learning_rate": 2.9202000000000003e-05, "loss": 0.0033, "step": 12397 }, { "epoch": 22.87534626038781, "grad_norm": 0.1910630315542221, "learning_rate": 2.9201666666666665e-05, "loss": 0.0071, "step": 12398 }, { "epoch": 22.87719298245614, "grad_norm": 0.1774922013282776, "learning_rate": 2.9201333333333335e-05, "loss": 0.0067, "step": 12399 }, { "epoch": 22.87903970452447, "grad_norm": 0.20055580139160156, "learning_rate": 2.9201e-05, "loss": 0.0056, "step": 12400 }, { "epoch": 22.8808864265928, "grad_norm": 0.311154842376709, "learning_rate": 2.9200666666666666e-05, "loss": 0.0077, "step": 12401 }, { "epoch": 22.882733148661128, "grad_norm": 0.23825916647911072, "learning_rate": 2.9200333333333336e-05, "loss": 0.0113, "step": 12402 }, { "epoch": 22.884579870729453, "grad_norm": 0.1427665799856186, "learning_rate": 2.92e-05, "loss": 0.0048, "step": 12403 }, { "epoch": 22.886426592797783, "grad_norm": 0.27265864610671997, "learning_rate": 2.9199666666666668e-05, "loss": 0.0062, "step": 12404 }, { "epoch": 22.88827331486611, "grad_norm": 0.2476198524236679, "learning_rate": 2.9199333333333334e-05, "loss": 0.0093, "step": 12405 }, { "epoch": 22.89012003693444, "grad_norm": 0.23540052771568298, "learning_rate": 2.9199000000000003e-05, "loss": 0.0033, "step": 12406 }, { "epoch": 22.89196675900277, "grad_norm": 0.14567874372005463, "learning_rate": 2.9198666666666665e-05, "loss": 0.0049, "step": 12407 }, { 
"epoch": 22.8938134810711, "grad_norm": 0.20529676973819733, "learning_rate": 2.9198333333333335e-05, "loss": 0.0038, "step": 12408 }, { "epoch": 22.89566020313943, "grad_norm": 0.1766471415758133, "learning_rate": 2.9198e-05, "loss": 0.0055, "step": 12409 }, { "epoch": 22.897506925207757, "grad_norm": 0.3108919858932495, "learning_rate": 2.9197666666666667e-05, "loss": 0.0057, "step": 12410 }, { "epoch": 22.899353647276087, "grad_norm": 0.44312068819999695, "learning_rate": 2.9197333333333336e-05, "loss": 0.0169, "step": 12411 }, { "epoch": 22.901200369344412, "grad_norm": 0.9064782857894897, "learning_rate": 2.9197e-05, "loss": 0.0138, "step": 12412 }, { "epoch": 22.90304709141274, "grad_norm": 0.39348894357681274, "learning_rate": 2.9196666666666668e-05, "loss": 0.0108, "step": 12413 }, { "epoch": 22.90489381348107, "grad_norm": 0.11488889157772064, "learning_rate": 2.9196333333333334e-05, "loss": 0.004, "step": 12414 }, { "epoch": 22.9067405355494, "grad_norm": 0.2089713215827942, "learning_rate": 2.9196e-05, "loss": 0.0065, "step": 12415 }, { "epoch": 22.90858725761773, "grad_norm": 0.20302043855190277, "learning_rate": 2.9195666666666666e-05, "loss": 0.0036, "step": 12416 }, { "epoch": 22.910433979686058, "grad_norm": 0.2970274090766907, "learning_rate": 2.9195333333333335e-05, "loss": 0.0065, "step": 12417 }, { "epoch": 22.912280701754387, "grad_norm": 1.0791375637054443, "learning_rate": 2.9195e-05, "loss": 0.0047, "step": 12418 }, { "epoch": 22.914127423822716, "grad_norm": 0.742299497127533, "learning_rate": 2.9194666666666667e-05, "loss": 0.0157, "step": 12419 }, { "epoch": 22.91597414589104, "grad_norm": 0.1627863496541977, "learning_rate": 2.9194333333333336e-05, "loss": 0.0041, "step": 12420 }, { "epoch": 22.91782086795937, "grad_norm": 0.10822322219610214, "learning_rate": 2.9194e-05, "loss": 0.0026, "step": 12421 }, { "epoch": 22.9196675900277, "grad_norm": 0.6408373713493347, "learning_rate": 2.9193666666666668e-05, "loss": 0.0173, "step": 12422 }, 
{ "epoch": 22.92151431209603, "grad_norm": 0.3392656445503235, "learning_rate": 2.9193333333333334e-05, "loss": 0.0089, "step": 12423 }, { "epoch": 22.92336103416436, "grad_norm": 0.6655851006507874, "learning_rate": 2.9193e-05, "loss": 0.0062, "step": 12424 }, { "epoch": 22.925207756232687, "grad_norm": 0.5653405785560608, "learning_rate": 2.919266666666667e-05, "loss": 0.1226, "step": 12425 }, { "epoch": 22.927054478301017, "grad_norm": 0.8246968388557434, "learning_rate": 2.9192333333333335e-05, "loss": 0.1196, "step": 12426 }, { "epoch": 22.928901200369346, "grad_norm": 0.5462274551391602, "learning_rate": 2.9192e-05, "loss": 0.0869, "step": 12427 }, { "epoch": 22.930747922437675, "grad_norm": 0.31987103819847107, "learning_rate": 2.9191666666666667e-05, "loss": 0.0669, "step": 12428 }, { "epoch": 22.932594644506, "grad_norm": 0.4289247989654541, "learning_rate": 2.9191333333333336e-05, "loss": 0.0557, "step": 12429 }, { "epoch": 22.93444136657433, "grad_norm": 0.31459957361221313, "learning_rate": 2.9191e-05, "loss": 0.0436, "step": 12430 }, { "epoch": 22.93628808864266, "grad_norm": 0.7228941917419434, "learning_rate": 2.9190666666666668e-05, "loss": 0.0688, "step": 12431 }, { "epoch": 22.938134810710988, "grad_norm": 0.3625168800354004, "learning_rate": 2.9190333333333334e-05, "loss": 0.0388, "step": 12432 }, { "epoch": 22.939981532779317, "grad_norm": 0.3639623820781708, "learning_rate": 2.919e-05, "loss": 0.0273, "step": 12433 }, { "epoch": 22.941828254847646, "grad_norm": 0.391364723443985, "learning_rate": 2.918966666666667e-05, "loss": 0.0201, "step": 12434 }, { "epoch": 22.943674976915975, "grad_norm": 0.25264233350753784, "learning_rate": 2.9189333333333335e-05, "loss": 0.0196, "step": 12435 }, { "epoch": 22.945521698984304, "grad_norm": 0.24215036630630493, "learning_rate": 2.9189e-05, "loss": 0.0312, "step": 12436 }, { "epoch": 22.94736842105263, "grad_norm": 0.3020641803741455, "learning_rate": 2.9188666666666667e-05, "loss": 0.034, "step": 12437 
}, { "epoch": 22.94921514312096, "grad_norm": 0.19730018079280853, "learning_rate": 2.9188333333333333e-05, "loss": 0.0124, "step": 12438 }, { "epoch": 22.95106186518929, "grad_norm": 0.23389595746994019, "learning_rate": 2.9188e-05, "loss": 0.0103, "step": 12439 }, { "epoch": 22.952908587257618, "grad_norm": 0.18454474210739136, "learning_rate": 2.918766666666667e-05, "loss": 0.0073, "step": 12440 }, { "epoch": 22.954755309325947, "grad_norm": 0.4759189486503601, "learning_rate": 2.918733333333333e-05, "loss": 0.0075, "step": 12441 }, { "epoch": 22.956602031394276, "grad_norm": 0.37919747829437256, "learning_rate": 2.9187e-05, "loss": 0.0115, "step": 12442 }, { "epoch": 22.958448753462605, "grad_norm": 0.2569182813167572, "learning_rate": 2.918666666666667e-05, "loss": 0.0079, "step": 12443 }, { "epoch": 22.960295475530934, "grad_norm": 0.3278430104255676, "learning_rate": 2.9186333333333332e-05, "loss": 0.0053, "step": 12444 }, { "epoch": 22.96214219759926, "grad_norm": 0.20840658247470856, "learning_rate": 2.9186e-05, "loss": 0.0179, "step": 12445 }, { "epoch": 22.96398891966759, "grad_norm": 0.15534381568431854, "learning_rate": 2.9185666666666667e-05, "loss": 0.0057, "step": 12446 }, { "epoch": 22.965835641735918, "grad_norm": 0.1486392617225647, "learning_rate": 2.9185333333333333e-05, "loss": 0.007, "step": 12447 }, { "epoch": 22.967682363804247, "grad_norm": 0.07954803854227066, "learning_rate": 2.9185e-05, "loss": 0.0029, "step": 12448 }, { "epoch": 22.969529085872576, "grad_norm": 0.13395774364471436, "learning_rate": 2.918466666666667e-05, "loss": 0.0049, "step": 12449 }, { "epoch": 22.971375807940905, "grad_norm": 0.17729364335536957, "learning_rate": 2.9184333333333335e-05, "loss": 0.0053, "step": 12450 }, { "epoch": 22.973222530009235, "grad_norm": 0.16160070896148682, "learning_rate": 2.9184e-05, "loss": 0.0042, "step": 12451 }, { "epoch": 22.975069252077564, "grad_norm": 0.28334203362464905, "learning_rate": 2.918366666666667e-05, "loss": 0.0061, 
"step": 12452 }, { "epoch": 22.97691597414589, "grad_norm": 0.3061280846595764, "learning_rate": 2.9183333333333332e-05, "loss": 0.0086, "step": 12453 }, { "epoch": 22.97876269621422, "grad_norm": 0.09037530422210693, "learning_rate": 2.9183e-05, "loss": 0.0019, "step": 12454 }, { "epoch": 22.980609418282548, "grad_norm": 0.1277635246515274, "learning_rate": 2.9182666666666668e-05, "loss": 0.004, "step": 12455 }, { "epoch": 22.982456140350877, "grad_norm": 0.20603525638580322, "learning_rate": 2.9182333333333334e-05, "loss": 0.009, "step": 12456 }, { "epoch": 22.984302862419206, "grad_norm": 0.23812738060951233, "learning_rate": 2.9182e-05, "loss": 0.0052, "step": 12457 }, { "epoch": 22.986149584487535, "grad_norm": 0.2609906494617462, "learning_rate": 2.918166666666667e-05, "loss": 0.004, "step": 12458 }, { "epoch": 22.987996306555864, "grad_norm": 0.3261003792285919, "learning_rate": 2.9181333333333335e-05, "loss": 0.008, "step": 12459 }, { "epoch": 22.989843028624193, "grad_norm": 0.08258796483278275, "learning_rate": 2.9181e-05, "loss": 0.0019, "step": 12460 }, { "epoch": 22.991689750692522, "grad_norm": 0.28730282187461853, "learning_rate": 2.918066666666667e-05, "loss": 0.0064, "step": 12461 }, { "epoch": 22.993536472760848, "grad_norm": 0.1670047640800476, "learning_rate": 2.9180333333333333e-05, "loss": 0.0064, "step": 12462 }, { "epoch": 22.995383194829177, "grad_norm": 0.23746854066848755, "learning_rate": 2.9180000000000002e-05, "loss": 0.0067, "step": 12463 }, { "epoch": 22.997229916897506, "grad_norm": 0.2047201544046402, "learning_rate": 2.9179666666666668e-05, "loss": 0.0051, "step": 12464 }, { "epoch": 22.999076638965835, "grad_norm": 0.2234080284833908, "learning_rate": 2.9179333333333334e-05, "loss": 0.0064, "step": 12465 }, { "epoch": 23.0, "grad_norm": 0.013559604994952679, "learning_rate": 2.9179e-05, "loss": 0.0002, "step": 12466 }, { "epoch": 23.00184672206833, "grad_norm": 1.3079710006713867, "learning_rate": 2.917866666666667e-05, "loss": 
0.1453, "step": 12467 }, { "epoch": 23.00369344413666, "grad_norm": 0.37919649481773376, "learning_rate": 2.9178333333333335e-05, "loss": 0.0945, "step": 12468 }, { "epoch": 23.005540166204987, "grad_norm": 0.5010470151901245, "learning_rate": 2.9178e-05, "loss": 0.0713, "step": 12469 }, { "epoch": 23.007386888273317, "grad_norm": 0.5508612990379333, "learning_rate": 2.9177666666666667e-05, "loss": 0.0923, "step": 12470 }, { "epoch": 23.009233610341642, "grad_norm": 0.37328410148620605, "learning_rate": 2.9177333333333333e-05, "loss": 0.0611, "step": 12471 }, { "epoch": 23.01108033240997, "grad_norm": 0.4111389219760895, "learning_rate": 2.9177000000000002e-05, "loss": 0.081, "step": 12472 }, { "epoch": 23.0129270544783, "grad_norm": 0.37702229619026184, "learning_rate": 2.9176666666666665e-05, "loss": 0.0854, "step": 12473 }, { "epoch": 23.01477377654663, "grad_norm": 0.7613016963005066, "learning_rate": 2.9176333333333334e-05, "loss": 0.0359, "step": 12474 }, { "epoch": 23.01662049861496, "grad_norm": 0.5037769675254822, "learning_rate": 2.9176000000000003e-05, "loss": 0.0771, "step": 12475 }, { "epoch": 23.018467220683288, "grad_norm": 1.4689644575119019, "learning_rate": 2.9175666666666666e-05, "loss": 0.0912, "step": 12476 }, { "epoch": 23.020313942751617, "grad_norm": 0.408502995967865, "learning_rate": 2.9175333333333335e-05, "loss": 0.0248, "step": 12477 }, { "epoch": 23.022160664819946, "grad_norm": 0.375881165266037, "learning_rate": 2.9175e-05, "loss": 0.0332, "step": 12478 }, { "epoch": 23.02400738688827, "grad_norm": 0.19678357243537903, "learning_rate": 2.9174666666666667e-05, "loss": 0.0225, "step": 12479 }, { "epoch": 23.0258541089566, "grad_norm": 0.2557147741317749, "learning_rate": 2.9174333333333333e-05, "loss": 0.0119, "step": 12480 }, { "epoch": 23.02770083102493, "grad_norm": 1.2409764528274536, "learning_rate": 2.9174000000000002e-05, "loss": 0.058, "step": 12481 }, { "epoch": 23.02954755309326, "grad_norm": 0.23163054883480072, 
"learning_rate": 2.9173666666666665e-05, "loss": 0.0096, "step": 12482 }, { "epoch": 23.03139427516159, "grad_norm": 0.4650649130344391, "learning_rate": 2.9173333333333334e-05, "loss": 0.0358, "step": 12483 }, { "epoch": 23.033240997229917, "grad_norm": 0.16126181185245514, "learning_rate": 2.9173000000000003e-05, "loss": 0.0067, "step": 12484 }, { "epoch": 23.035087719298247, "grad_norm": 0.20258918404579163, "learning_rate": 2.9172666666666666e-05, "loss": 0.0099, "step": 12485 }, { "epoch": 23.036934441366576, "grad_norm": 0.23453587293624878, "learning_rate": 2.9172333333333335e-05, "loss": 0.0086, "step": 12486 }, { "epoch": 23.0387811634349, "grad_norm": 0.33570969104766846, "learning_rate": 2.9172e-05, "loss": 0.0076, "step": 12487 }, { "epoch": 23.04062788550323, "grad_norm": 0.20703759789466858, "learning_rate": 2.9171666666666667e-05, "loss": 0.005, "step": 12488 }, { "epoch": 23.04247460757156, "grad_norm": 0.29281994700431824, "learning_rate": 2.9171333333333333e-05, "loss": 0.0093, "step": 12489 }, { "epoch": 23.04432132963989, "grad_norm": 0.18278279900550842, "learning_rate": 2.9171000000000002e-05, "loss": 0.0055, "step": 12490 }, { "epoch": 23.046168051708218, "grad_norm": 0.15609000623226166, "learning_rate": 2.9170666666666665e-05, "loss": 0.0033, "step": 12491 }, { "epoch": 23.048014773776547, "grad_norm": 0.27524250745773315, "learning_rate": 2.9170333333333334e-05, "loss": 0.0065, "step": 12492 }, { "epoch": 23.049861495844876, "grad_norm": 0.11792323738336563, "learning_rate": 2.9170000000000004e-05, "loss": 0.0031, "step": 12493 }, { "epoch": 23.051708217913205, "grad_norm": 0.6204906702041626, "learning_rate": 2.9169666666666666e-05, "loss": 0.0385, "step": 12494 }, { "epoch": 23.053554939981534, "grad_norm": 0.13774995505809784, "learning_rate": 2.9169333333333335e-05, "loss": 0.0047, "step": 12495 }, { "epoch": 23.05540166204986, "grad_norm": 0.0980542004108429, "learning_rate": 2.9169e-05, "loss": 0.0032, "step": 12496 }, { "epoch": 
23.05724838411819, "grad_norm": 0.5431872010231018, "learning_rate": 2.9168666666666667e-05, "loss": 0.0131, "step": 12497 }, { "epoch": 23.05909510618652, "grad_norm": 0.19743503630161285, "learning_rate": 2.9168333333333333e-05, "loss": 0.0043, "step": 12498 }, { "epoch": 23.060941828254848, "grad_norm": 0.2094327211380005, "learning_rate": 2.9168e-05, "loss": 0.0029, "step": 12499 }, { "epoch": 23.062788550323177, "grad_norm": 0.2092137336730957, "learning_rate": 2.916766666666667e-05, "loss": 0.0048, "step": 12500 }, { "epoch": 23.064635272391506, "grad_norm": 0.1424982249736786, "learning_rate": 2.9167333333333334e-05, "loss": 0.0054, "step": 12501 }, { "epoch": 23.066481994459835, "grad_norm": 0.20085524022579193, "learning_rate": 2.9167e-05, "loss": 0.0033, "step": 12502 }, { "epoch": 23.068328716528164, "grad_norm": 0.24300634860992432, "learning_rate": 2.9166666666666666e-05, "loss": 0.0124, "step": 12503 }, { "epoch": 23.07017543859649, "grad_norm": 0.24897274374961853, "learning_rate": 2.9166333333333336e-05, "loss": 0.0106, "step": 12504 }, { "epoch": 23.07202216066482, "grad_norm": 0.7429565787315369, "learning_rate": 2.9165999999999998e-05, "loss": 0.0112, "step": 12505 }, { "epoch": 23.073868882733148, "grad_norm": 0.12227975577116013, "learning_rate": 2.9165666666666668e-05, "loss": 0.0058, "step": 12506 }, { "epoch": 23.075715604801477, "grad_norm": 0.21596293151378632, "learning_rate": 2.9165333333333334e-05, "loss": 0.0071, "step": 12507 }, { "epoch": 23.077562326869806, "grad_norm": 0.14186695218086243, "learning_rate": 2.9165e-05, "loss": 0.0026, "step": 12508 }, { "epoch": 23.079409048938135, "grad_norm": 0.307436466217041, "learning_rate": 2.916466666666667e-05, "loss": 0.0049, "step": 12509 }, { "epoch": 23.081255771006465, "grad_norm": 0.42267316579818726, "learning_rate": 2.9164333333333335e-05, "loss": 0.0093, "step": 12510 }, { "epoch": 23.083102493074794, "grad_norm": 0.1583794504404068, "learning_rate": 2.9164e-05, "loss": 0.0048, 
"step": 12511 }, { "epoch": 23.08494921514312, "grad_norm": 0.30443325638771057, "learning_rate": 2.9163666666666667e-05, "loss": 0.0095, "step": 12512 }, { "epoch": 23.08679593721145, "grad_norm": 0.276859849691391, "learning_rate": 2.9163333333333336e-05, "loss": 0.0065, "step": 12513 }, { "epoch": 23.088642659279778, "grad_norm": 0.49172136187553406, "learning_rate": 2.9163e-05, "loss": 0.0111, "step": 12514 }, { "epoch": 23.090489381348107, "grad_norm": 0.47428908944129944, "learning_rate": 2.9162666666666668e-05, "loss": 0.0068, "step": 12515 }, { "epoch": 23.092336103416436, "grad_norm": 0.16608187556266785, "learning_rate": 2.9162333333333334e-05, "loss": 0.0046, "step": 12516 }, { "epoch": 23.094182825484765, "grad_norm": 0.6573468446731567, "learning_rate": 2.9162e-05, "loss": 0.1058, "step": 12517 }, { "epoch": 23.096029547553094, "grad_norm": 0.545746386051178, "learning_rate": 2.916166666666667e-05, "loss": 0.0954, "step": 12518 }, { "epoch": 23.097876269621423, "grad_norm": 0.5941588878631592, "learning_rate": 2.9161333333333335e-05, "loss": 0.1491, "step": 12519 }, { "epoch": 23.099722991689752, "grad_norm": 0.4453190863132477, "learning_rate": 2.9161e-05, "loss": 0.0645, "step": 12520 }, { "epoch": 23.101569713758078, "grad_norm": 0.3122742176055908, "learning_rate": 2.9160666666666667e-05, "loss": 0.0887, "step": 12521 }, { "epoch": 23.103416435826407, "grad_norm": 0.9442075490951538, "learning_rate": 2.9160333333333336e-05, "loss": 0.0764, "step": 12522 }, { "epoch": 23.105263157894736, "grad_norm": 0.2535693049430847, "learning_rate": 2.916e-05, "loss": 0.0327, "step": 12523 }, { "epoch": 23.107109879963065, "grad_norm": 0.45661574602127075, "learning_rate": 2.9159666666666668e-05, "loss": 0.0532, "step": 12524 }, { "epoch": 23.108956602031395, "grad_norm": 0.6820226311683655, "learning_rate": 2.9159333333333337e-05, "loss": 0.0371, "step": 12525 }, { "epoch": 23.110803324099724, "grad_norm": 0.4742131233215332, "learning_rate": 2.9159e-05, 
"loss": 0.0313, "step": 12526 }, { "epoch": 23.112650046168053, "grad_norm": 0.8153624534606934, "learning_rate": 2.915866666666667e-05, "loss": 0.0468, "step": 12527 }, { "epoch": 23.114496768236382, "grad_norm": 0.3805418908596039, "learning_rate": 2.9158333333333335e-05, "loss": 0.0377, "step": 12528 }, { "epoch": 23.116343490304708, "grad_norm": 0.30506157875061035, "learning_rate": 2.9158e-05, "loss": 0.0674, "step": 12529 }, { "epoch": 23.118190212373037, "grad_norm": 0.7386313676834106, "learning_rate": 2.9157666666666667e-05, "loss": 0.0401, "step": 12530 }, { "epoch": 23.120036934441366, "grad_norm": 0.22930021584033966, "learning_rate": 2.9157333333333333e-05, "loss": 0.0078, "step": 12531 }, { "epoch": 23.121883656509695, "grad_norm": 0.22462855279445648, "learning_rate": 2.9157e-05, "loss": 0.0218, "step": 12532 }, { "epoch": 23.123730378578024, "grad_norm": 0.11458956450223923, "learning_rate": 2.9156666666666668e-05, "loss": 0.0035, "step": 12533 }, { "epoch": 23.125577100646353, "grad_norm": 0.18596674501895905, "learning_rate": 2.9156333333333334e-05, "loss": 0.0059, "step": 12534 }, { "epoch": 23.127423822714682, "grad_norm": 0.2123582661151886, "learning_rate": 2.9156e-05, "loss": 0.0067, "step": 12535 }, { "epoch": 23.12927054478301, "grad_norm": 0.2569544315338135, "learning_rate": 2.915566666666667e-05, "loss": 0.0405, "step": 12536 }, { "epoch": 23.131117266851337, "grad_norm": 0.22042743861675262, "learning_rate": 2.9155333333333332e-05, "loss": 0.0086, "step": 12537 }, { "epoch": 23.132963988919666, "grad_norm": 0.2655360698699951, "learning_rate": 2.9155e-05, "loss": 0.006, "step": 12538 }, { "epoch": 23.134810710987995, "grad_norm": 0.24769195914268494, "learning_rate": 2.9154666666666667e-05, "loss": 0.008, "step": 12539 }, { "epoch": 23.136657433056325, "grad_norm": 0.25198256969451904, "learning_rate": 2.9154333333333333e-05, "loss": 0.0057, "step": 12540 }, { "epoch": 23.138504155124654, "grad_norm": 0.19294026494026184, 
"learning_rate": 2.9154e-05, "loss": 0.0054, "step": 12541 }, { "epoch": 23.140350877192983, "grad_norm": 0.2432713508605957, "learning_rate": 2.915366666666667e-05, "loss": 0.008, "step": 12542 }, { "epoch": 23.142197599261312, "grad_norm": 0.351470410823822, "learning_rate": 2.9153333333333334e-05, "loss": 0.0145, "step": 12543 }, { "epoch": 23.14404432132964, "grad_norm": 0.3115994930267334, "learning_rate": 2.9153e-05, "loss": 0.0134, "step": 12544 }, { "epoch": 23.14589104339797, "grad_norm": 0.39242538809776306, "learning_rate": 2.915266666666667e-05, "loss": 0.0063, "step": 12545 }, { "epoch": 23.147737765466296, "grad_norm": 0.11153402179479599, "learning_rate": 2.9152333333333332e-05, "loss": 0.0024, "step": 12546 }, { "epoch": 23.149584487534625, "grad_norm": 0.30442219972610474, "learning_rate": 2.9152e-05, "loss": 0.0049, "step": 12547 }, { "epoch": 23.151431209602954, "grad_norm": 0.2781001031398773, "learning_rate": 2.9151666666666667e-05, "loss": 0.0356, "step": 12548 }, { "epoch": 23.153277931671283, "grad_norm": 0.20190612971782684, "learning_rate": 2.9151333333333333e-05, "loss": 0.0047, "step": 12549 }, { "epoch": 23.155124653739612, "grad_norm": 0.3549395799636841, "learning_rate": 2.9151000000000003e-05, "loss": 0.0073, "step": 12550 }, { "epoch": 23.15697137580794, "grad_norm": 0.39812251925468445, "learning_rate": 2.915066666666667e-05, "loss": 0.0173, "step": 12551 }, { "epoch": 23.15881809787627, "grad_norm": 0.22466783225536346, "learning_rate": 2.9150333333333334e-05, "loss": 0.0056, "step": 12552 }, { "epoch": 23.1606648199446, "grad_norm": 0.06262242048978806, "learning_rate": 2.915e-05, "loss": 0.0017, "step": 12553 }, { "epoch": 23.162511542012926, "grad_norm": 0.5167449116706848, "learning_rate": 2.914966666666667e-05, "loss": 0.0075, "step": 12554 }, { "epoch": 23.164358264081255, "grad_norm": 0.1647091954946518, "learning_rate": 2.9149333333333332e-05, "loss": 0.0044, "step": 12555 }, { "epoch": 23.166204986149584, "grad_norm": 
0.13090741634368896, "learning_rate": 2.9149e-05, "loss": 0.0031, "step": 12556 }, { "epoch": 23.168051708217913, "grad_norm": 0.3570130169391632, "learning_rate": 2.9148666666666668e-05, "loss": 0.0042, "step": 12557 }, { "epoch": 23.169898430286242, "grad_norm": 0.3519172966480255, "learning_rate": 2.9148333333333333e-05, "loss": 0.0079, "step": 12558 }, { "epoch": 23.17174515235457, "grad_norm": 0.22465786337852478, "learning_rate": 2.9148000000000003e-05, "loss": 0.0029, "step": 12559 }, { "epoch": 23.1735918744229, "grad_norm": 0.10870680212974548, "learning_rate": 2.9147666666666665e-05, "loss": 0.0026, "step": 12560 }, { "epoch": 23.17543859649123, "grad_norm": 0.19479309022426605, "learning_rate": 2.9147333333333335e-05, "loss": 0.006, "step": 12561 }, { "epoch": 23.177285318559555, "grad_norm": 0.26224997639656067, "learning_rate": 2.9147e-05, "loss": 0.0031, "step": 12562 }, { "epoch": 23.179132040627884, "grad_norm": 0.22105391323566437, "learning_rate": 2.9146666666666667e-05, "loss": 0.0039, "step": 12563 }, { "epoch": 23.180978762696213, "grad_norm": 0.3528079390525818, "learning_rate": 2.9146333333333332e-05, "loss": 0.006, "step": 12564 }, { "epoch": 23.182825484764543, "grad_norm": 0.19657863676548004, "learning_rate": 2.9146000000000002e-05, "loss": 0.0039, "step": 12565 }, { "epoch": 23.18467220683287, "grad_norm": 0.14275223016738892, "learning_rate": 2.9145666666666664e-05, "loss": 0.0017, "step": 12566 }, { "epoch": 23.1865189289012, "grad_norm": 0.6491601467132568, "learning_rate": 2.9145333333333334e-05, "loss": 0.1199, "step": 12567 }, { "epoch": 23.18836565096953, "grad_norm": 0.7567947506904602, "learning_rate": 2.9145000000000003e-05, "loss": 0.1151, "step": 12568 }, { "epoch": 23.19021237303786, "grad_norm": 0.3431910574436188, "learning_rate": 2.9144666666666666e-05, "loss": 0.068, "step": 12569 }, { "epoch": 23.19205909510619, "grad_norm": 0.40085679292678833, "learning_rate": 2.9144333333333335e-05, "loss": 0.0507, "step": 12570 }, { 
"epoch": 23.193905817174514, "grad_norm": 0.5359059572219849, "learning_rate": 2.9144e-05, "loss": 0.0545, "step": 12571 }, { "epoch": 23.195752539242843, "grad_norm": 0.7867258787155151, "learning_rate": 2.9143666666666667e-05, "loss": 0.1062, "step": 12572 }, { "epoch": 23.197599261311172, "grad_norm": 0.3034769892692566, "learning_rate": 2.9143333333333333e-05, "loss": 0.0363, "step": 12573 }, { "epoch": 23.1994459833795, "grad_norm": 1.3643563985824585, "learning_rate": 2.9143000000000002e-05, "loss": 0.0534, "step": 12574 }, { "epoch": 23.20129270544783, "grad_norm": 0.30873343348503113, "learning_rate": 2.9142666666666668e-05, "loss": 0.0444, "step": 12575 }, { "epoch": 23.20313942751616, "grad_norm": 0.3701000213623047, "learning_rate": 2.9142333333333334e-05, "loss": 0.0196, "step": 12576 }, { "epoch": 23.20498614958449, "grad_norm": 0.29086777567863464, "learning_rate": 2.9142000000000003e-05, "loss": 0.0192, "step": 12577 }, { "epoch": 23.206832871652818, "grad_norm": 0.3202081024646759, "learning_rate": 2.9141666666666666e-05, "loss": 0.0642, "step": 12578 }, { "epoch": 23.208679593721143, "grad_norm": 0.37413352727890015, "learning_rate": 2.9141333333333335e-05, "loss": 0.0289, "step": 12579 }, { "epoch": 23.210526315789473, "grad_norm": 0.23711729049682617, "learning_rate": 2.9141e-05, "loss": 0.0334, "step": 12580 }, { "epoch": 23.2123730378578, "grad_norm": 0.2957731783390045, "learning_rate": 2.9140666666666667e-05, "loss": 0.0108, "step": 12581 }, { "epoch": 23.21421975992613, "grad_norm": 1.019913911819458, "learning_rate": 2.9140333333333333e-05, "loss": 0.027, "step": 12582 }, { "epoch": 23.21606648199446, "grad_norm": 0.29194220900535583, "learning_rate": 2.9140000000000002e-05, "loss": 0.0112, "step": 12583 }, { "epoch": 23.21791320406279, "grad_norm": 0.33904239535331726, "learning_rate": 2.9139666666666668e-05, "loss": 0.0122, "step": 12584 }, { "epoch": 23.21975992613112, "grad_norm": 0.23502930998802185, "learning_rate": 
2.9139333333333334e-05, "loss": 0.0105, "step": 12585 }, { "epoch": 23.221606648199447, "grad_norm": 0.08390850573778152, "learning_rate": 2.9139000000000003e-05, "loss": 0.0032, "step": 12586 }, { "epoch": 23.223453370267773, "grad_norm": 0.3864550292491913, "learning_rate": 2.9138666666666666e-05, "loss": 0.0066, "step": 12587 }, { "epoch": 23.225300092336102, "grad_norm": 0.37667298316955566, "learning_rate": 2.9138333333333335e-05, "loss": 0.0087, "step": 12588 }, { "epoch": 23.22714681440443, "grad_norm": 0.3234068751335144, "learning_rate": 2.9137999999999998e-05, "loss": 0.0078, "step": 12589 }, { "epoch": 23.22899353647276, "grad_norm": 0.145706906914711, "learning_rate": 2.9137666666666667e-05, "loss": 0.0045, "step": 12590 }, { "epoch": 23.23084025854109, "grad_norm": 0.1408831626176834, "learning_rate": 2.9137333333333333e-05, "loss": 0.0049, "step": 12591 }, { "epoch": 23.23268698060942, "grad_norm": 0.36585304141044617, "learning_rate": 2.9137e-05, "loss": 0.0084, "step": 12592 }, { "epoch": 23.234533702677748, "grad_norm": 0.1734265834093094, "learning_rate": 2.913666666666667e-05, "loss": 0.0043, "step": 12593 }, { "epoch": 23.236380424746077, "grad_norm": 0.24555036425590515, "learning_rate": 2.9136333333333334e-05, "loss": 0.0069, "step": 12594 }, { "epoch": 23.238227146814406, "grad_norm": 0.1656181663274765, "learning_rate": 2.9136e-05, "loss": 0.0042, "step": 12595 }, { "epoch": 23.24007386888273, "grad_norm": 0.27070939540863037, "learning_rate": 2.9135666666666666e-05, "loss": 0.0057, "step": 12596 }, { "epoch": 23.24192059095106, "grad_norm": 0.27400118112564087, "learning_rate": 2.9135333333333335e-05, "loss": 0.008, "step": 12597 }, { "epoch": 23.24376731301939, "grad_norm": 0.30686214566230774, "learning_rate": 2.9134999999999998e-05, "loss": 0.0042, "step": 12598 }, { "epoch": 23.24561403508772, "grad_norm": 0.46688786149024963, "learning_rate": 2.9134666666666667e-05, "loss": 0.0075, "step": 12599 }, { "epoch": 23.24746075715605, 
"grad_norm": 0.30184173583984375, "learning_rate": 2.9134333333333337e-05, "loss": 0.0058, "step": 12600 }, { "epoch": 23.249307479224377, "grad_norm": 0.15009987354278564, "learning_rate": 2.9134e-05, "loss": 0.0033, "step": 12601 }, { "epoch": 23.251154201292707, "grad_norm": 0.48980292677879333, "learning_rate": 2.913366666666667e-05, "loss": 0.0133, "step": 12602 }, { "epoch": 23.253000923361036, "grad_norm": 0.1655283272266388, "learning_rate": 2.9133333333333334e-05, "loss": 0.0046, "step": 12603 }, { "epoch": 23.25484764542936, "grad_norm": 0.665810763835907, "learning_rate": 2.9133e-05, "loss": 0.0266, "step": 12604 }, { "epoch": 23.25669436749769, "grad_norm": 1.3942642211914062, "learning_rate": 2.9132666666666666e-05, "loss": 0.0121, "step": 12605 }, { "epoch": 23.25854108956602, "grad_norm": 0.11559968441724777, "learning_rate": 2.9132333333333336e-05, "loss": 0.0024, "step": 12606 }, { "epoch": 23.26038781163435, "grad_norm": 0.14520831406116486, "learning_rate": 2.9131999999999998e-05, "loss": 0.0042, "step": 12607 }, { "epoch": 23.262234533702678, "grad_norm": 0.3412821590900421, "learning_rate": 2.9131666666666668e-05, "loss": 0.0064, "step": 12608 }, { "epoch": 23.264081255771007, "grad_norm": 0.28790393471717834, "learning_rate": 2.9131333333333337e-05, "loss": 0.0051, "step": 12609 }, { "epoch": 23.265927977839336, "grad_norm": 0.231611967086792, "learning_rate": 2.9131e-05, "loss": 0.0083, "step": 12610 }, { "epoch": 23.267774699907665, "grad_norm": 0.28216591477394104, "learning_rate": 2.913066666666667e-05, "loss": 0.0062, "step": 12611 }, { "epoch": 23.26962142197599, "grad_norm": 0.24869054555892944, "learning_rate": 2.9130333333333335e-05, "loss": 0.0048, "step": 12612 }, { "epoch": 23.27146814404432, "grad_norm": 0.27544718980789185, "learning_rate": 2.913e-05, "loss": 0.0252, "step": 12613 }, { "epoch": 23.27331486611265, "grad_norm": 0.6152492165565491, "learning_rate": 2.9129666666666667e-05, "loss": 0.0046, "step": 12614 }, { "epoch": 
23.27516158818098, "grad_norm": 0.26752281188964844, "learning_rate": 2.9129333333333336e-05, "loss": 0.0058, "step": 12615 }, { "epoch": 23.277008310249307, "grad_norm": 0.31082385778427124, "learning_rate": 2.9129e-05, "loss": 0.0076, "step": 12616 }, { "epoch": 23.278855032317637, "grad_norm": 0.5576523542404175, "learning_rate": 2.9128666666666668e-05, "loss": 0.1092, "step": 12617 }, { "epoch": 23.280701754385966, "grad_norm": 0.5595287084579468, "learning_rate": 2.9128333333333337e-05, "loss": 0.1438, "step": 12618 }, { "epoch": 23.282548476454295, "grad_norm": 0.4700721800327301, "learning_rate": 2.9128e-05, "loss": 0.0899, "step": 12619 }, { "epoch": 23.284395198522624, "grad_norm": 0.5074760317802429, "learning_rate": 2.912766666666667e-05, "loss": 0.1053, "step": 12620 }, { "epoch": 23.28624192059095, "grad_norm": 0.3127371668815613, "learning_rate": 2.912733333333333e-05, "loss": 0.0583, "step": 12621 }, { "epoch": 23.28808864265928, "grad_norm": 0.510924756526947, "learning_rate": 2.9127e-05, "loss": 0.101, "step": 12622 }, { "epoch": 23.289935364727608, "grad_norm": 0.46187469363212585, "learning_rate": 2.9126666666666667e-05, "loss": 0.0423, "step": 12623 }, { "epoch": 23.291782086795937, "grad_norm": 0.37520408630371094, "learning_rate": 2.9126333333333333e-05, "loss": 0.073, "step": 12624 }, { "epoch": 23.293628808864266, "grad_norm": 0.3077435791492462, "learning_rate": 2.9126000000000002e-05, "loss": 0.0263, "step": 12625 }, { "epoch": 23.295475530932595, "grad_norm": 0.32046741247177124, "learning_rate": 2.9125666666666668e-05, "loss": 0.058, "step": 12626 }, { "epoch": 23.297322253000925, "grad_norm": 0.5632917284965515, "learning_rate": 2.9125333333333334e-05, "loss": 0.0277, "step": 12627 }, { "epoch": 23.299168975069254, "grad_norm": 0.7035937905311584, "learning_rate": 2.9125e-05, "loss": 0.0307, "step": 12628 }, { "epoch": 23.30101569713758, "grad_norm": 0.794179379940033, "learning_rate": 2.912466666666667e-05, "loss": 0.022, "step": 12629 
}, { "epoch": 23.30286241920591, "grad_norm": 0.3423013389110565, "learning_rate": 2.912433333333333e-05, "loss": 0.0282, "step": 12630 }, { "epoch": 23.304709141274238, "grad_norm": 0.2058839648962021, "learning_rate": 2.9124e-05, "loss": 0.0126, "step": 12631 }, { "epoch": 23.306555863342567, "grad_norm": 0.270408570766449, "learning_rate": 2.9123666666666667e-05, "loss": 0.0207, "step": 12632 }, { "epoch": 23.308402585410896, "grad_norm": 0.3184267580509186, "learning_rate": 2.9123333333333333e-05, "loss": 0.0294, "step": 12633 }, { "epoch": 23.310249307479225, "grad_norm": 0.41973748803138733, "learning_rate": 2.9123000000000002e-05, "loss": 0.0124, "step": 12634 }, { "epoch": 23.312096029547554, "grad_norm": 0.1604977697134018, "learning_rate": 2.9122666666666668e-05, "loss": 0.0039, "step": 12635 }, { "epoch": 23.313942751615883, "grad_norm": 0.5372335314750671, "learning_rate": 2.9122333333333334e-05, "loss": 0.0087, "step": 12636 }, { "epoch": 23.31578947368421, "grad_norm": 0.17940805852413177, "learning_rate": 2.9122e-05, "loss": 0.0081, "step": 12637 }, { "epoch": 23.317636195752538, "grad_norm": 0.187593013048172, "learning_rate": 2.912166666666667e-05, "loss": 0.0061, "step": 12638 }, { "epoch": 23.319482917820867, "grad_norm": 0.27255645394325256, "learning_rate": 2.9121333333333332e-05, "loss": 0.012, "step": 12639 }, { "epoch": 23.321329639889196, "grad_norm": 0.5173263549804688, "learning_rate": 2.9121e-05, "loss": 0.0131, "step": 12640 }, { "epoch": 23.323176361957525, "grad_norm": 0.3303021788597107, "learning_rate": 2.9120666666666667e-05, "loss": 0.013, "step": 12641 }, { "epoch": 23.325023084025855, "grad_norm": 0.14484189450740814, "learning_rate": 2.9120333333333333e-05, "loss": 0.0036, "step": 12642 }, { "epoch": 23.326869806094184, "grad_norm": 0.19277650117874146, "learning_rate": 2.9120000000000002e-05, "loss": 0.0054, "step": 12643 }, { "epoch": 23.328716528162513, "grad_norm": 0.39073553681373596, "learning_rate": 
2.911966666666667e-05, "loss": 0.0081, "step": 12644 }, { "epoch": 23.330563250230842, "grad_norm": 0.23671211302280426, "learning_rate": 2.9119333333333334e-05, "loss": 0.0036, "step": 12645 }, { "epoch": 23.332409972299168, "grad_norm": 0.13604755699634552, "learning_rate": 2.9119e-05, "loss": 0.004, "step": 12646 }, { "epoch": 23.334256694367497, "grad_norm": 0.12806524336338043, "learning_rate": 2.911866666666667e-05, "loss": 0.0036, "step": 12647 }, { "epoch": 23.336103416435826, "grad_norm": 0.26302891969680786, "learning_rate": 2.9118333333333332e-05, "loss": 0.0084, "step": 12648 }, { "epoch": 23.337950138504155, "grad_norm": 0.32138726115226746, "learning_rate": 2.9118e-05, "loss": 0.0082, "step": 12649 }, { "epoch": 23.339796860572484, "grad_norm": 0.2769556939601898, "learning_rate": 2.9117666666666667e-05, "loss": 0.006, "step": 12650 }, { "epoch": 23.341643582640813, "grad_norm": 0.22334329783916473, "learning_rate": 2.9117333333333333e-05, "loss": 0.007, "step": 12651 }, { "epoch": 23.343490304709142, "grad_norm": 0.5758256316184998, "learning_rate": 2.9117000000000003e-05, "loss": 0.0087, "step": 12652 }, { "epoch": 23.34533702677747, "grad_norm": 1.134762167930603, "learning_rate": 2.9116666666666665e-05, "loss": 0.0071, "step": 12653 }, { "epoch": 23.347183748845797, "grad_norm": 0.18429134786128998, "learning_rate": 2.9116333333333334e-05, "loss": 0.0043, "step": 12654 }, { "epoch": 23.349030470914126, "grad_norm": 0.13381122052669525, "learning_rate": 2.9116e-05, "loss": 0.0041, "step": 12655 }, { "epoch": 23.350877192982455, "grad_norm": 0.37146177887916565, "learning_rate": 2.9115666666666666e-05, "loss": 0.0133, "step": 12656 }, { "epoch": 23.352723915050785, "grad_norm": 0.1729261875152588, "learning_rate": 2.9115333333333332e-05, "loss": 0.0048, "step": 12657 }, { "epoch": 23.354570637119114, "grad_norm": 0.21925175189971924, "learning_rate": 2.9115e-05, "loss": 0.0078, "step": 12658 }, { "epoch": 23.356417359187443, "grad_norm": 
0.3351660370826721, "learning_rate": 2.9114666666666668e-05, "loss": 0.0081, "step": 12659 }, { "epoch": 23.358264081255772, "grad_norm": 0.150557741522789, "learning_rate": 2.9114333333333333e-05, "loss": 0.005, "step": 12660 }, { "epoch": 23.3601108033241, "grad_norm": 0.2575150430202484, "learning_rate": 2.9114000000000003e-05, "loss": 0.009, "step": 12661 }, { "epoch": 23.361957525392427, "grad_norm": 0.24913664162158966, "learning_rate": 2.9113666666666665e-05, "loss": 0.0057, "step": 12662 }, { "epoch": 23.363804247460756, "grad_norm": 0.13307268917560577, "learning_rate": 2.9113333333333335e-05, "loss": 0.0021, "step": 12663 }, { "epoch": 23.365650969529085, "grad_norm": 0.47522854804992676, "learning_rate": 2.9113e-05, "loss": 0.0133, "step": 12664 }, { "epoch": 23.367497691597414, "grad_norm": 0.2812003791332245, "learning_rate": 2.9112666666666667e-05, "loss": 0.0066, "step": 12665 }, { "epoch": 23.369344413665743, "grad_norm": 0.33105483651161194, "learning_rate": 2.9112333333333332e-05, "loss": 0.0053, "step": 12666 }, { "epoch": 23.371191135734072, "grad_norm": 0.5941159725189209, "learning_rate": 2.9112000000000002e-05, "loss": 0.1752, "step": 12667 }, { "epoch": 23.3730378578024, "grad_norm": 0.5663772821426392, "learning_rate": 2.9111666666666668e-05, "loss": 0.1289, "step": 12668 }, { "epoch": 23.37488457987073, "grad_norm": 0.4650385081768036, "learning_rate": 2.9111333333333334e-05, "loss": 0.0951, "step": 12669 }, { "epoch": 23.37673130193906, "grad_norm": 0.659968376159668, "learning_rate": 2.9111000000000003e-05, "loss": 0.0744, "step": 12670 }, { "epoch": 23.378578024007385, "grad_norm": 0.44175758957862854, "learning_rate": 2.9110666666666666e-05, "loss": 0.0556, "step": 12671 }, { "epoch": 23.380424746075715, "grad_norm": 0.5107433795928955, "learning_rate": 2.9110333333333335e-05, "loss": 0.0617, "step": 12672 }, { "epoch": 23.382271468144044, "grad_norm": 0.3211396634578705, "learning_rate": 2.911e-05, "loss": 0.0402, "step": 12673 }, { 
"epoch": 23.384118190212373, "grad_norm": 0.37351423501968384, "learning_rate": 2.9109666666666667e-05, "loss": 0.0478, "step": 12674 }, { "epoch": 23.385964912280702, "grad_norm": 0.2717868685722351, "learning_rate": 2.9109333333333336e-05, "loss": 0.024, "step": 12675 }, { "epoch": 23.38781163434903, "grad_norm": 0.40558385848999023, "learning_rate": 2.9109000000000002e-05, "loss": 0.0448, "step": 12676 }, { "epoch": 23.38965835641736, "grad_norm": 0.4204607605934143, "learning_rate": 2.9108666666666668e-05, "loss": 0.0393, "step": 12677 }, { "epoch": 23.39150507848569, "grad_norm": 0.3483840823173523, "learning_rate": 2.9108333333333334e-05, "loss": 0.0241, "step": 12678 }, { "epoch": 23.393351800554015, "grad_norm": 0.4850672483444214, "learning_rate": 2.9108000000000003e-05, "loss": 0.0657, "step": 12679 }, { "epoch": 23.395198522622344, "grad_norm": 0.31261759996414185, "learning_rate": 2.9107666666666666e-05, "loss": 0.0128, "step": 12680 }, { "epoch": 23.397045244690673, "grad_norm": 0.4061281085014343, "learning_rate": 2.9107333333333335e-05, "loss": 0.0239, "step": 12681 }, { "epoch": 23.398891966759003, "grad_norm": 0.22250206768512726, "learning_rate": 2.9106999999999998e-05, "loss": 0.0144, "step": 12682 }, { "epoch": 23.40073868882733, "grad_norm": 0.2188418060541153, "learning_rate": 2.9106666666666667e-05, "loss": 0.0074, "step": 12683 }, { "epoch": 23.40258541089566, "grad_norm": 0.18531258404254913, "learning_rate": 2.9106333333333336e-05, "loss": 0.0123, "step": 12684 }, { "epoch": 23.40443213296399, "grad_norm": 0.16225002706050873, "learning_rate": 2.9106e-05, "loss": 0.0046, "step": 12685 }, { "epoch": 23.40627885503232, "grad_norm": 0.37271827459335327, "learning_rate": 2.9105666666666668e-05, "loss": 0.0058, "step": 12686 }, { "epoch": 23.408125577100645, "grad_norm": 0.3324749767780304, "learning_rate": 2.9105333333333334e-05, "loss": 0.0076, "step": 12687 }, { "epoch": 23.409972299168974, "grad_norm": 0.07640788704156876, "learning_rate": 
2.9105e-05, "loss": 0.0031, "step": 12688 }, { "epoch": 23.411819021237303, "grad_norm": 0.36134073138237, "learning_rate": 2.9104666666666666e-05, "loss": 0.0119, "step": 12689 }, { "epoch": 23.413665743305632, "grad_norm": 0.12606969475746155, "learning_rate": 2.9104333333333335e-05, "loss": 0.0025, "step": 12690 }, { "epoch": 23.41551246537396, "grad_norm": 0.38095346093177795, "learning_rate": 2.9103999999999998e-05, "loss": 0.0088, "step": 12691 }, { "epoch": 23.41735918744229, "grad_norm": 0.18201282620429993, "learning_rate": 2.9103666666666667e-05, "loss": 0.0078, "step": 12692 }, { "epoch": 23.41920590951062, "grad_norm": 0.24027188122272491, "learning_rate": 2.9103333333333336e-05, "loss": 0.0129, "step": 12693 }, { "epoch": 23.42105263157895, "grad_norm": 0.12497299909591675, "learning_rate": 2.9103e-05, "loss": 0.0044, "step": 12694 }, { "epoch": 23.422899353647278, "grad_norm": 0.3064292371273041, "learning_rate": 2.910266666666667e-05, "loss": 0.003, "step": 12695 }, { "epoch": 23.424746075715603, "grad_norm": 0.2261001467704773, "learning_rate": 2.9102333333333334e-05, "loss": 0.0072, "step": 12696 }, { "epoch": 23.426592797783933, "grad_norm": 0.2631829082965851, "learning_rate": 2.9102e-05, "loss": 0.0062, "step": 12697 }, { "epoch": 23.42843951985226, "grad_norm": 0.23572014272212982, "learning_rate": 2.9101666666666666e-05, "loss": 0.0045, "step": 12698 }, { "epoch": 23.43028624192059, "grad_norm": 0.21679142117500305, "learning_rate": 2.9101333333333335e-05, "loss": 0.0063, "step": 12699 }, { "epoch": 23.43213296398892, "grad_norm": 0.16654612123966217, "learning_rate": 2.9101e-05, "loss": 0.0059, "step": 12700 }, { "epoch": 23.43397968605725, "grad_norm": 0.39357325434684753, "learning_rate": 2.9100666666666667e-05, "loss": 0.0092, "step": 12701 }, { "epoch": 23.43582640812558, "grad_norm": 0.2027686983346939, "learning_rate": 2.9100333333333337e-05, "loss": 0.0027, "step": 12702 }, { "epoch": 23.437673130193907, "grad_norm": 
0.18773828446865082, "learning_rate": 2.91e-05, "loss": 0.0043, "step": 12703 }, { "epoch": 23.439519852262233, "grad_norm": 0.1085064485669136, "learning_rate": 2.909966666666667e-05, "loss": 0.0034, "step": 12704 }, { "epoch": 23.441366574330562, "grad_norm": 0.24215655028820038, "learning_rate": 2.9099333333333334e-05, "loss": 0.0073, "step": 12705 }, { "epoch": 23.44321329639889, "grad_norm": 0.3163291811943054, "learning_rate": 2.9099e-05, "loss": 0.0096, "step": 12706 }, { "epoch": 23.44506001846722, "grad_norm": 0.1995258331298828, "learning_rate": 2.9098666666666666e-05, "loss": 0.0059, "step": 12707 }, { "epoch": 23.44690674053555, "grad_norm": 0.09423418343067169, "learning_rate": 2.9098333333333336e-05, "loss": 0.0024, "step": 12708 }, { "epoch": 23.44875346260388, "grad_norm": 0.1298067569732666, "learning_rate": 2.9098e-05, "loss": 0.0034, "step": 12709 }, { "epoch": 23.450600184672208, "grad_norm": 0.16497434675693512, "learning_rate": 2.9097666666666668e-05, "loss": 0.0068, "step": 12710 }, { "epoch": 23.452446906740537, "grad_norm": 0.16174855828285217, "learning_rate": 2.9097333333333333e-05, "loss": 0.0036, "step": 12711 }, { "epoch": 23.454293628808863, "grad_norm": 0.08075518906116486, "learning_rate": 2.9097e-05, "loss": 0.0018, "step": 12712 }, { "epoch": 23.45614035087719, "grad_norm": 0.2373785823583603, "learning_rate": 2.909666666666667e-05, "loss": 0.0108, "step": 12713 }, { "epoch": 23.45798707294552, "grad_norm": 0.23240111768245697, "learning_rate": 2.909633333333333e-05, "loss": 0.0214, "step": 12714 }, { "epoch": 23.45983379501385, "grad_norm": 0.19462589919567108, "learning_rate": 2.9096e-05, "loss": 0.006, "step": 12715 }, { "epoch": 23.46168051708218, "grad_norm": 0.2087223082780838, "learning_rate": 2.9095666666666667e-05, "loss": 0.0174, "step": 12716 }, { "epoch": 23.46352723915051, "grad_norm": 0.4997592866420746, "learning_rate": 2.9095333333333332e-05, "loss": 0.1128, "step": 12717 }, { "epoch": 23.465373961218837, 
"grad_norm": 0.5647820830345154, "learning_rate": 2.9095000000000002e-05, "loss": 0.1108, "step": 12718 }, { "epoch": 23.467220683287167, "grad_norm": 0.5153325796127319, "learning_rate": 2.9094666666666668e-05, "loss": 0.0906, "step": 12719 }, { "epoch": 23.469067405355496, "grad_norm": 0.560015082359314, "learning_rate": 2.9094333333333334e-05, "loss": 0.1163, "step": 12720 }, { "epoch": 23.47091412742382, "grad_norm": 0.5469334721565247, "learning_rate": 2.9094e-05, "loss": 0.0669, "step": 12721 }, { "epoch": 23.47276084949215, "grad_norm": 0.5907200574874878, "learning_rate": 2.909366666666667e-05, "loss": 0.0516, "step": 12722 }, { "epoch": 23.47460757156048, "grad_norm": 0.40143343806266785, "learning_rate": 2.909333333333333e-05, "loss": 0.0358, "step": 12723 }, { "epoch": 23.47645429362881, "grad_norm": 0.37248697876930237, "learning_rate": 2.9093e-05, "loss": 0.031, "step": 12724 }, { "epoch": 23.478301015697138, "grad_norm": 0.6797778010368347, "learning_rate": 2.909266666666667e-05, "loss": 0.0435, "step": 12725 }, { "epoch": 23.480147737765467, "grad_norm": 1.5801701545715332, "learning_rate": 2.9092333333333333e-05, "loss": 0.0214, "step": 12726 }, { "epoch": 23.481994459833796, "grad_norm": 0.26523342728614807, "learning_rate": 2.9092000000000002e-05, "loss": 0.029, "step": 12727 }, { "epoch": 23.483841181902125, "grad_norm": 0.2564677298069, "learning_rate": 2.9091666666666668e-05, "loss": 0.0188, "step": 12728 }, { "epoch": 23.48568790397045, "grad_norm": 0.31995120644569397, "learning_rate": 2.9091333333333334e-05, "loss": 0.0285, "step": 12729 }, { "epoch": 23.48753462603878, "grad_norm": 0.2464805245399475, "learning_rate": 2.9091e-05, "loss": 0.0092, "step": 12730 }, { "epoch": 23.48938134810711, "grad_norm": 0.30991116166114807, "learning_rate": 2.909066666666667e-05, "loss": 0.0168, "step": 12731 }, { "epoch": 23.49122807017544, "grad_norm": 0.2520284056663513, "learning_rate": 2.909033333333333e-05, "loss": 0.0107, "step": 12732 }, { "epoch": 
23.493074792243767, "grad_norm": 0.2349097579717636, "learning_rate": 2.909e-05, "loss": 0.0176, "step": 12733 }, { "epoch": 23.494921514312097, "grad_norm": 0.4836427867412567, "learning_rate": 2.908966666666667e-05, "loss": 0.0101, "step": 12734 }, { "epoch": 23.496768236380426, "grad_norm": 1.5186824798583984, "learning_rate": 2.9089333333333333e-05, "loss": 0.0418, "step": 12735 }, { "epoch": 23.498614958448755, "grad_norm": 0.25052887201309204, "learning_rate": 2.9089000000000002e-05, "loss": 0.0119, "step": 12736 }, { "epoch": 23.50046168051708, "grad_norm": 0.2631968855857849, "learning_rate": 2.9088666666666668e-05, "loss": 0.0337, "step": 12737 }, { "epoch": 23.50230840258541, "grad_norm": 0.18188248574733734, "learning_rate": 2.9088333333333334e-05, "loss": 0.0042, "step": 12738 }, { "epoch": 23.50415512465374, "grad_norm": 0.5120291709899902, "learning_rate": 2.9088e-05, "loss": 0.0093, "step": 12739 }, { "epoch": 23.506001846722068, "grad_norm": 0.12005823105573654, "learning_rate": 2.9087666666666666e-05, "loss": 0.0039, "step": 12740 }, { "epoch": 23.507848568790397, "grad_norm": 0.28853529691696167, "learning_rate": 2.9087333333333332e-05, "loss": 0.0082, "step": 12741 }, { "epoch": 23.509695290858726, "grad_norm": 0.1488683521747589, "learning_rate": 2.9087e-05, "loss": 0.0051, "step": 12742 }, { "epoch": 23.511542012927055, "grad_norm": 0.6102681159973145, "learning_rate": 2.9086666666666667e-05, "loss": 0.0086, "step": 12743 }, { "epoch": 23.513388734995385, "grad_norm": 0.30232924222946167, "learning_rate": 2.9086333333333333e-05, "loss": 0.0079, "step": 12744 }, { "epoch": 23.51523545706371, "grad_norm": 0.3770429790019989, "learning_rate": 2.9086000000000002e-05, "loss": 0.0087, "step": 12745 }, { "epoch": 23.51708217913204, "grad_norm": 0.27687808871269226, "learning_rate": 2.9085666666666665e-05, "loss": 0.0072, "step": 12746 }, { "epoch": 23.51892890120037, "grad_norm": 0.4040433168411255, "learning_rate": 2.9085333333333334e-05, "loss": 
0.0291, "step": 12747 }, { "epoch": 23.520775623268698, "grad_norm": 0.4633181095123291, "learning_rate": 2.9085e-05, "loss": 0.01, "step": 12748 }, { "epoch": 23.522622345337027, "grad_norm": 0.21011197566986084, "learning_rate": 2.9084666666666666e-05, "loss": 0.0058, "step": 12749 }, { "epoch": 23.524469067405356, "grad_norm": 0.09449068456888199, "learning_rate": 2.9084333333333335e-05, "loss": 0.0026, "step": 12750 }, { "epoch": 23.526315789473685, "grad_norm": 0.22294382750988007, "learning_rate": 2.9084e-05, "loss": 0.0052, "step": 12751 }, { "epoch": 23.528162511542014, "grad_norm": 0.6660040616989136, "learning_rate": 2.9083666666666667e-05, "loss": 0.0324, "step": 12752 }, { "epoch": 23.530009233610343, "grad_norm": 0.20625843107700348, "learning_rate": 2.9083333333333333e-05, "loss": 0.0064, "step": 12753 }, { "epoch": 23.53185595567867, "grad_norm": 0.21546894311904907, "learning_rate": 2.9083000000000003e-05, "loss": 0.0051, "step": 12754 }, { "epoch": 23.533702677746998, "grad_norm": 0.21883495151996613, "learning_rate": 2.9082666666666665e-05, "loss": 0.0059, "step": 12755 }, { "epoch": 23.535549399815327, "grad_norm": 1.180564522743225, "learning_rate": 2.9082333333333334e-05, "loss": 0.0141, "step": 12756 }, { "epoch": 23.537396121883656, "grad_norm": 0.13864454627037048, "learning_rate": 2.9082e-05, "loss": 0.0026, "step": 12757 }, { "epoch": 23.539242843951985, "grad_norm": 0.2007858157157898, "learning_rate": 2.9081666666666666e-05, "loss": 0.0038, "step": 12758 }, { "epoch": 23.541089566020315, "grad_norm": 0.27783122658729553, "learning_rate": 2.9081333333333336e-05, "loss": 0.007, "step": 12759 }, { "epoch": 23.542936288088644, "grad_norm": 0.19886739552021027, "learning_rate": 2.9081e-05, "loss": 0.0057, "step": 12760 }, { "epoch": 23.544783010156973, "grad_norm": 0.23898394405841827, "learning_rate": 2.9080666666666668e-05, "loss": 0.0054, "step": 12761 }, { "epoch": 23.5466297322253, "grad_norm": 0.22545062005519867, "learning_rate": 
2.9080333333333333e-05, "loss": 0.0046, "step": 12762 }, { "epoch": 23.548476454293628, "grad_norm": 0.16324621438980103, "learning_rate": 2.9080000000000003e-05, "loss": 0.002, "step": 12763 }, { "epoch": 23.550323176361957, "grad_norm": 0.19871051609516144, "learning_rate": 2.9079666666666665e-05, "loss": 0.0041, "step": 12764 }, { "epoch": 23.552169898430286, "grad_norm": 0.31745415925979614, "learning_rate": 2.9079333333333335e-05, "loss": 0.0033, "step": 12765 }, { "epoch": 23.554016620498615, "grad_norm": 0.6388939619064331, "learning_rate": 2.9079e-05, "loss": 0.0151, "step": 12766 }, { "epoch": 23.555863342566944, "grad_norm": 0.46394413709640503, "learning_rate": 2.9078666666666667e-05, "loss": 0.1668, "step": 12767 }, { "epoch": 23.557710064635273, "grad_norm": 0.5517489910125732, "learning_rate": 2.9078333333333336e-05, "loss": 0.0812, "step": 12768 }, { "epoch": 23.559556786703602, "grad_norm": 0.46741852164268494, "learning_rate": 2.9078000000000002e-05, "loss": 0.0696, "step": 12769 }, { "epoch": 23.56140350877193, "grad_norm": 0.44784027338027954, "learning_rate": 2.9077666666666668e-05, "loss": 0.0636, "step": 12770 }, { "epoch": 23.563250230840257, "grad_norm": 0.4539169669151306, "learning_rate": 2.9077333333333334e-05, "loss": 0.0528, "step": 12771 }, { "epoch": 23.565096952908586, "grad_norm": 0.3409072458744049, "learning_rate": 2.9077e-05, "loss": 0.0333, "step": 12772 }, { "epoch": 23.566943674976915, "grad_norm": 0.4125320017337799, "learning_rate": 2.9076666666666666e-05, "loss": 0.0608, "step": 12773 }, { "epoch": 23.568790397045245, "grad_norm": 0.4366224408149719, "learning_rate": 2.9076333333333335e-05, "loss": 0.0594, "step": 12774 }, { "epoch": 23.570637119113574, "grad_norm": 0.32634279131889343, "learning_rate": 2.9076e-05, "loss": 0.0346, "step": 12775 }, { "epoch": 23.572483841181903, "grad_norm": 0.4740084409713745, "learning_rate": 2.9075666666666667e-05, "loss": 0.0271, "step": 12776 }, { "epoch": 23.574330563250232, 
"grad_norm": 0.45157182216644287, "learning_rate": 2.9075333333333336e-05, "loss": 0.0275, "step": 12777 }, { "epoch": 23.57617728531856, "grad_norm": 0.4033220410346985, "learning_rate": 2.9075e-05, "loss": 0.0467, "step": 12778 }, { "epoch": 23.578024007386887, "grad_norm": 0.5562022924423218, "learning_rate": 2.9074666666666668e-05, "loss": 0.0194, "step": 12779 }, { "epoch": 23.579870729455216, "grad_norm": 0.30805903673171997, "learning_rate": 2.9074333333333334e-05, "loss": 0.0141, "step": 12780 }, { "epoch": 23.581717451523545, "grad_norm": 0.45251429080963135, "learning_rate": 2.9074e-05, "loss": 0.024, "step": 12781 }, { "epoch": 23.583564173591874, "grad_norm": 0.3250104784965515, "learning_rate": 2.9073666666666666e-05, "loss": 0.0216, "step": 12782 }, { "epoch": 23.585410895660203, "grad_norm": 0.11664088815450668, "learning_rate": 2.9073333333333335e-05, "loss": 0.0041, "step": 12783 }, { "epoch": 23.587257617728532, "grad_norm": 0.5777392387390137, "learning_rate": 2.9073e-05, "loss": 0.0263, "step": 12784 }, { "epoch": 23.58910433979686, "grad_norm": 0.32111817598342896, "learning_rate": 2.9072666666666667e-05, "loss": 0.0114, "step": 12785 }, { "epoch": 23.59095106186519, "grad_norm": 0.34782543778419495, "learning_rate": 2.9072333333333336e-05, "loss": 0.0136, "step": 12786 }, { "epoch": 23.592797783933516, "grad_norm": 0.539177656173706, "learning_rate": 2.9072e-05, "loss": 0.0094, "step": 12787 }, { "epoch": 23.594644506001845, "grad_norm": 0.1888371706008911, "learning_rate": 2.9071666666666668e-05, "loss": 0.0069, "step": 12788 }, { "epoch": 23.596491228070175, "grad_norm": 0.3934464156627655, "learning_rate": 2.9071333333333334e-05, "loss": 0.0128, "step": 12789 }, { "epoch": 23.598337950138504, "grad_norm": 0.21380853652954102, "learning_rate": 2.9071e-05, "loss": 0.0054, "step": 12790 }, { "epoch": 23.600184672206833, "grad_norm": 0.18468987941741943, "learning_rate": 2.9070666666666666e-05, "loss": 0.0048, "step": 12791 }, { "epoch": 
23.602031394275162, "grad_norm": 0.10190550237894058, "learning_rate": 2.9070333333333335e-05, "loss": 0.0044, "step": 12792 }, { "epoch": 23.60387811634349, "grad_norm": 0.31460437178611755, "learning_rate": 2.907e-05, "loss": 0.0057, "step": 12793 }, { "epoch": 23.60572483841182, "grad_norm": 0.20910628139972687, "learning_rate": 2.9069666666666667e-05, "loss": 0.0061, "step": 12794 }, { "epoch": 23.607571560480146, "grad_norm": 0.2906193435192108, "learning_rate": 2.9069333333333336e-05, "loss": 0.0111, "step": 12795 }, { "epoch": 23.609418282548475, "grad_norm": 0.3938979506492615, "learning_rate": 2.9069e-05, "loss": 0.055, "step": 12796 }, { "epoch": 23.611265004616804, "grad_norm": 0.27934545278549194, "learning_rate": 2.906866666666667e-05, "loss": 0.0339, "step": 12797 }, { "epoch": 23.613111726685133, "grad_norm": 0.4780665338039398, "learning_rate": 2.9068333333333334e-05, "loss": 0.007, "step": 12798 }, { "epoch": 23.614958448753463, "grad_norm": 0.14447623491287231, "learning_rate": 2.9068e-05, "loss": 0.0028, "step": 12799 }, { "epoch": 23.61680517082179, "grad_norm": 0.4102858901023865, "learning_rate": 2.906766666666667e-05, "loss": 0.0126, "step": 12800 }, { "epoch": 23.61865189289012, "grad_norm": 0.41097262501716614, "learning_rate": 2.9067333333333332e-05, "loss": 0.0072, "step": 12801 }, { "epoch": 23.62049861495845, "grad_norm": 0.11427780240774155, "learning_rate": 2.9067e-05, "loss": 0.0031, "step": 12802 }, { "epoch": 23.62234533702678, "grad_norm": 0.3202766180038452, "learning_rate": 2.9066666666666667e-05, "loss": 0.0076, "step": 12803 }, { "epoch": 23.624192059095105, "grad_norm": 0.21148407459259033, "learning_rate": 2.9066333333333333e-05, "loss": 0.0045, "step": 12804 }, { "epoch": 23.626038781163434, "grad_norm": 0.4769347608089447, "learning_rate": 2.9066e-05, "loss": 0.0062, "step": 12805 }, { "epoch": 23.627885503231763, "grad_norm": 0.19845016300678253, "learning_rate": 2.906566666666667e-05, "loss": 0.0055, "step": 12806 }, { 
"epoch": 23.629732225300092, "grad_norm": 0.2139330506324768, "learning_rate": 2.906533333333333e-05, "loss": 0.0058, "step": 12807 }, { "epoch": 23.63157894736842, "grad_norm": 0.27285897731781006, "learning_rate": 2.9065e-05, "loss": 0.0104, "step": 12808 }, { "epoch": 23.63342566943675, "grad_norm": 0.4506590962409973, "learning_rate": 2.906466666666667e-05, "loss": 0.009, "step": 12809 }, { "epoch": 23.63527239150508, "grad_norm": 0.3636559247970581, "learning_rate": 2.9064333333333332e-05, "loss": 0.0044, "step": 12810 }, { "epoch": 23.63711911357341, "grad_norm": 0.10463052988052368, "learning_rate": 2.9064e-05, "loss": 0.0025, "step": 12811 }, { "epoch": 23.638965835641734, "grad_norm": 0.47499239444732666, "learning_rate": 2.9063666666666668e-05, "loss": 0.0088, "step": 12812 }, { "epoch": 23.640812557710063, "grad_norm": 0.23979461193084717, "learning_rate": 2.9063333333333333e-05, "loss": 0.0046, "step": 12813 }, { "epoch": 23.642659279778393, "grad_norm": 0.3647124171257019, "learning_rate": 2.9063e-05, "loss": 0.0087, "step": 12814 }, { "epoch": 23.64450600184672, "grad_norm": 0.25506895780563354, "learning_rate": 2.906266666666667e-05, "loss": 0.0048, "step": 12815 }, { "epoch": 23.64635272391505, "grad_norm": 0.3625432252883911, "learning_rate": 2.9062333333333335e-05, "loss": 0.0099, "step": 12816 }, { "epoch": 23.64819944598338, "grad_norm": 0.5960791110992432, "learning_rate": 2.9062e-05, "loss": 0.1273, "step": 12817 }, { "epoch": 23.65004616805171, "grad_norm": 0.46018001437187195, "learning_rate": 2.906166666666667e-05, "loss": 0.1252, "step": 12818 }, { "epoch": 23.65189289012004, "grad_norm": 0.44288110733032227, "learning_rate": 2.9061333333333332e-05, "loss": 0.1154, "step": 12819 }, { "epoch": 23.653739612188367, "grad_norm": 0.4618895649909973, "learning_rate": 2.9061000000000002e-05, "loss": 0.0859, "step": 12820 }, { "epoch": 23.655586334256693, "grad_norm": 0.4462798237800598, "learning_rate": 2.9060666666666668e-05, "loss": 0.1258, 
"step": 12821 }, { "epoch": 23.657433056325022, "grad_norm": 0.5325228571891785, "learning_rate": 2.9060333333333334e-05, "loss": 0.0484, "step": 12822 }, { "epoch": 23.65927977839335, "grad_norm": 0.3383803069591522, "learning_rate": 2.906e-05, "loss": 0.041, "step": 12823 }, { "epoch": 23.66112650046168, "grad_norm": 0.3687587082386017, "learning_rate": 2.905966666666667e-05, "loss": 0.0365, "step": 12824 }, { "epoch": 23.66297322253001, "grad_norm": 0.4730265438556671, "learning_rate": 2.9059333333333335e-05, "loss": 0.077, "step": 12825 }, { "epoch": 23.66481994459834, "grad_norm": 0.41435232758522034, "learning_rate": 2.9059e-05, "loss": 0.0335, "step": 12826 }, { "epoch": 23.666666666666668, "grad_norm": 0.3603576123714447, "learning_rate": 2.905866666666667e-05, "loss": 0.0235, "step": 12827 }, { "epoch": 23.668513388734997, "grad_norm": 0.22493116557598114, "learning_rate": 2.9058333333333333e-05, "loss": 0.0158, "step": 12828 }, { "epoch": 23.670360110803323, "grad_norm": 0.5691091418266296, "learning_rate": 2.9058000000000002e-05, "loss": 0.0814, "step": 12829 }, { "epoch": 23.67220683287165, "grad_norm": 0.3875930607318878, "learning_rate": 2.9057666666666668e-05, "loss": 0.0219, "step": 12830 }, { "epoch": 23.67405355493998, "grad_norm": 0.7209383249282837, "learning_rate": 2.9057333333333334e-05, "loss": 0.0278, "step": 12831 }, { "epoch": 23.67590027700831, "grad_norm": 0.2960909605026245, "learning_rate": 2.9057e-05, "loss": 0.0321, "step": 12832 }, { "epoch": 23.67774699907664, "grad_norm": 0.3103616535663605, "learning_rate": 2.9056666666666666e-05, "loss": 0.0115, "step": 12833 }, { "epoch": 23.67959372114497, "grad_norm": 0.20014365017414093, "learning_rate": 2.9056333333333335e-05, "loss": 0.0082, "step": 12834 }, { "epoch": 23.681440443213297, "grad_norm": 0.1251969039440155, "learning_rate": 2.9056e-05, "loss": 0.0062, "step": 12835 }, { "epoch": 23.683287165281627, "grad_norm": 0.26539865136146545, "learning_rate": 2.9055666666666667e-05, 
"loss": 0.024, "step": 12836 }, { "epoch": 23.685133887349952, "grad_norm": 0.26761507987976074, "learning_rate": 2.9055333333333333e-05, "loss": 0.0085, "step": 12837 }, { "epoch": 23.68698060941828, "grad_norm": 0.077335886657238, "learning_rate": 2.9055000000000002e-05, "loss": 0.0043, "step": 12838 }, { "epoch": 23.68882733148661, "grad_norm": 0.24382653832435608, "learning_rate": 2.9054666666666665e-05, "loss": 0.007, "step": 12839 }, { "epoch": 23.69067405355494, "grad_norm": 0.2220059633255005, "learning_rate": 2.9054333333333334e-05, "loss": 0.0062, "step": 12840 }, { "epoch": 23.69252077562327, "grad_norm": 0.28311780095100403, "learning_rate": 2.9054e-05, "loss": 0.0221, "step": 12841 }, { "epoch": 23.694367497691598, "grad_norm": 0.1493522971868515, "learning_rate": 2.9053666666666666e-05, "loss": 0.0035, "step": 12842 }, { "epoch": 23.696214219759927, "grad_norm": 0.17693676054477692, "learning_rate": 2.9053333333333335e-05, "loss": 0.0089, "step": 12843 }, { "epoch": 23.698060941828256, "grad_norm": 0.21312695741653442, "learning_rate": 2.9053e-05, "loss": 0.0049, "step": 12844 }, { "epoch": 23.69990766389658, "grad_norm": 0.31813400983810425, "learning_rate": 2.9052666666666667e-05, "loss": 0.0073, "step": 12845 }, { "epoch": 23.70175438596491, "grad_norm": 0.1391538828611374, "learning_rate": 2.9052333333333333e-05, "loss": 0.0045, "step": 12846 }, { "epoch": 23.70360110803324, "grad_norm": 0.28718283772468567, "learning_rate": 2.9052000000000002e-05, "loss": 0.0061, "step": 12847 }, { "epoch": 23.70544783010157, "grad_norm": 0.27051207423210144, "learning_rate": 2.9051666666666665e-05, "loss": 0.0049, "step": 12848 }, { "epoch": 23.7072945521699, "grad_norm": 0.15699365735054016, "learning_rate": 2.9051333333333334e-05, "loss": 0.0048, "step": 12849 }, { "epoch": 23.709141274238227, "grad_norm": 0.5311880707740784, "learning_rate": 2.9051000000000004e-05, "loss": 0.0072, "step": 12850 }, { "epoch": 23.710987996306557, "grad_norm": 
0.5135352611541748, "learning_rate": 2.9050666666666666e-05, "loss": 0.0092, "step": 12851 }, { "epoch": 23.712834718374886, "grad_norm": 0.30122503638267517, "learning_rate": 2.9050333333333335e-05, "loss": 0.0049, "step": 12852 }, { "epoch": 23.714681440443215, "grad_norm": 0.2334645688533783, "learning_rate": 2.905e-05, "loss": 0.0349, "step": 12853 }, { "epoch": 23.71652816251154, "grad_norm": 0.2619197368621826, "learning_rate": 2.9049666666666667e-05, "loss": 0.0044, "step": 12854 }, { "epoch": 23.71837488457987, "grad_norm": 0.23635706305503845, "learning_rate": 2.9049333333333333e-05, "loss": 0.0091, "step": 12855 }, { "epoch": 23.7202216066482, "grad_norm": 0.24201664328575134, "learning_rate": 2.9049000000000003e-05, "loss": 0.0053, "step": 12856 }, { "epoch": 23.722068328716528, "grad_norm": 0.3879227340221405, "learning_rate": 2.9048666666666665e-05, "loss": 0.004, "step": 12857 }, { "epoch": 23.723915050784857, "grad_norm": 0.2202427089214325, "learning_rate": 2.9048333333333334e-05, "loss": 0.003, "step": 12858 }, { "epoch": 23.725761772853186, "grad_norm": 0.21150343120098114, "learning_rate": 2.9048000000000004e-05, "loss": 0.007, "step": 12859 }, { "epoch": 23.727608494921515, "grad_norm": 0.20980283617973328, "learning_rate": 2.9047666666666666e-05, "loss": 0.0042, "step": 12860 }, { "epoch": 23.729455216989845, "grad_norm": 0.22404924035072327, "learning_rate": 2.9047333333333336e-05, "loss": 0.0074, "step": 12861 }, { "epoch": 23.73130193905817, "grad_norm": 0.26028111577033997, "learning_rate": 2.9046999999999998e-05, "loss": 0.0068, "step": 12862 }, { "epoch": 23.7331486611265, "grad_norm": 0.2740020453929901, "learning_rate": 2.9046666666666668e-05, "loss": 0.0064, "step": 12863 }, { "epoch": 23.73499538319483, "grad_norm": 0.22199766337871552, "learning_rate": 2.9046333333333333e-05, "loss": 0.0032, "step": 12864 }, { "epoch": 23.736842105263158, "grad_norm": 0.3249658942222595, "learning_rate": 2.9046e-05, "loss": 0.0118, "step": 12865 }, { 
"epoch": 23.738688827331487, "grad_norm": 0.2935706377029419, "learning_rate": 2.904566666666667e-05, "loss": 0.0129, "step": 12866 }, { "epoch": 23.740535549399816, "grad_norm": 0.5398800373077393, "learning_rate": 2.9045333333333335e-05, "loss": 0.1306, "step": 12867 }, { "epoch": 23.742382271468145, "grad_norm": 0.3459707796573639, "learning_rate": 2.9045e-05, "loss": 0.0956, "step": 12868 }, { "epoch": 23.744228993536474, "grad_norm": 0.7129822969436646, "learning_rate": 2.9044666666666667e-05, "loss": 0.099, "step": 12869 }, { "epoch": 23.746075715604803, "grad_norm": 0.5271465182304382, "learning_rate": 2.9044333333333336e-05, "loss": 0.07, "step": 12870 }, { "epoch": 23.74792243767313, "grad_norm": 0.5197558403015137, "learning_rate": 2.9044e-05, "loss": 0.0734, "step": 12871 }, { "epoch": 23.749769159741458, "grad_norm": 0.3547579050064087, "learning_rate": 2.9043666666666668e-05, "loss": 0.0382, "step": 12872 }, { "epoch": 23.751615881809787, "grad_norm": 0.38486313819885254, "learning_rate": 2.9043333333333334e-05, "loss": 0.0638, "step": 12873 }, { "epoch": 23.753462603878116, "grad_norm": 0.2941659390926361, "learning_rate": 2.9043e-05, "loss": 0.0366, "step": 12874 }, { "epoch": 23.755309325946445, "grad_norm": 0.37029245495796204, "learning_rate": 2.904266666666667e-05, "loss": 0.0396, "step": 12875 }, { "epoch": 23.757156048014775, "grad_norm": 0.8271615505218506, "learning_rate": 2.9042333333333335e-05, "loss": 0.0512, "step": 12876 }, { "epoch": 23.759002770083104, "grad_norm": 0.5787763595581055, "learning_rate": 2.9042e-05, "loss": 0.023, "step": 12877 }, { "epoch": 23.760849492151433, "grad_norm": 0.3281150162220001, "learning_rate": 2.9041666666666667e-05, "loss": 0.0432, "step": 12878 }, { "epoch": 23.76269621421976, "grad_norm": 0.5119446516036987, "learning_rate": 2.9041333333333336e-05, "loss": 0.0633, "step": 12879 }, { "epoch": 23.764542936288088, "grad_norm": 0.27672407031059265, "learning_rate": 2.9041e-05, "loss": 0.036, "step": 12880 
}, { "epoch": 23.766389658356417, "grad_norm": 0.44429293274879456, "learning_rate": 2.9040666666666668e-05, "loss": 0.0299, "step": 12881 }, { "epoch": 23.768236380424746, "grad_norm": 0.24825359880924225, "learning_rate": 2.9040333333333334e-05, "loss": 0.0329, "step": 12882 }, { "epoch": 23.770083102493075, "grad_norm": 0.45071160793304443, "learning_rate": 2.904e-05, "loss": 0.0113, "step": 12883 }, { "epoch": 23.771929824561404, "grad_norm": 0.26706889271736145, "learning_rate": 2.903966666666667e-05, "loss": 0.0067, "step": 12884 }, { "epoch": 23.773776546629733, "grad_norm": 0.41779765486717224, "learning_rate": 2.9039333333333335e-05, "loss": 0.027, "step": 12885 }, { "epoch": 23.775623268698062, "grad_norm": 0.23480276763439178, "learning_rate": 2.9039e-05, "loss": 0.0097, "step": 12886 }, { "epoch": 23.777469990766388, "grad_norm": 0.2854717969894409, "learning_rate": 2.9038666666666667e-05, "loss": 0.0125, "step": 12887 }, { "epoch": 23.779316712834717, "grad_norm": 0.21366217732429504, "learning_rate": 2.9038333333333336e-05, "loss": 0.0036, "step": 12888 }, { "epoch": 23.781163434903046, "grad_norm": 0.1949768364429474, "learning_rate": 2.9038e-05, "loss": 0.0098, "step": 12889 }, { "epoch": 23.783010156971375, "grad_norm": 0.13498984277248383, "learning_rate": 2.9037666666666668e-05, "loss": 0.0029, "step": 12890 }, { "epoch": 23.784856879039705, "grad_norm": 0.25774431228637695, "learning_rate": 2.9037333333333334e-05, "loss": 0.0073, "step": 12891 }, { "epoch": 23.786703601108034, "grad_norm": 0.3333340287208557, "learning_rate": 2.9037e-05, "loss": 0.0121, "step": 12892 }, { "epoch": 23.788550323176363, "grad_norm": 0.18044157326221466, "learning_rate": 2.903666666666667e-05, "loss": 0.0163, "step": 12893 }, { "epoch": 23.790397045244692, "grad_norm": 0.08477292209863663, "learning_rate": 2.9036333333333332e-05, "loss": 0.0021, "step": 12894 }, { "epoch": 23.792243767313018, "grad_norm": 0.11724770069122314, "learning_rate": 2.9036e-05, "loss": 
0.0036, "step": 12895 }, { "epoch": 23.794090489381347, "grad_norm": 0.480721116065979, "learning_rate": 2.9035666666666667e-05, "loss": 0.0112, "step": 12896 }, { "epoch": 23.795937211449676, "grad_norm": 0.2716176509857178, "learning_rate": 2.9035333333333333e-05, "loss": 0.0064, "step": 12897 }, { "epoch": 23.797783933518005, "grad_norm": 0.3495538532733917, "learning_rate": 2.9035e-05, "loss": 0.006, "step": 12898 }, { "epoch": 23.799630655586334, "grad_norm": 0.1573408991098404, "learning_rate": 2.903466666666667e-05, "loss": 0.0038, "step": 12899 }, { "epoch": 23.801477377654663, "grad_norm": 0.2714599072933197, "learning_rate": 2.9034333333333334e-05, "loss": 0.0043, "step": 12900 }, { "epoch": 23.803324099722992, "grad_norm": 0.14191612601280212, "learning_rate": 2.9034e-05, "loss": 0.0038, "step": 12901 }, { "epoch": 23.80517082179132, "grad_norm": 0.5580388307571411, "learning_rate": 2.903366666666667e-05, "loss": 0.0123, "step": 12902 }, { "epoch": 23.80701754385965, "grad_norm": 0.3556264340877533, "learning_rate": 2.9033333333333332e-05, "loss": 0.0073, "step": 12903 }, { "epoch": 23.808864265927976, "grad_norm": 0.24814888834953308, "learning_rate": 2.9033e-05, "loss": 0.01, "step": 12904 }, { "epoch": 23.810710987996305, "grad_norm": 0.35184523463249207, "learning_rate": 2.9032666666666667e-05, "loss": 0.0043, "step": 12905 }, { "epoch": 23.812557710064635, "grad_norm": 0.1633293479681015, "learning_rate": 2.9032333333333333e-05, "loss": 0.0047, "step": 12906 }, { "epoch": 23.814404432132964, "grad_norm": 0.1968468427658081, "learning_rate": 2.9032e-05, "loss": 0.0063, "step": 12907 }, { "epoch": 23.816251154201293, "grad_norm": 0.3138710558414459, "learning_rate": 2.903166666666667e-05, "loss": 0.0059, "step": 12908 }, { "epoch": 23.818097876269622, "grad_norm": 0.19953186810016632, "learning_rate": 2.9031333333333334e-05, "loss": 0.0058, "step": 12909 }, { "epoch": 23.81994459833795, "grad_norm": 0.19568628072738647, "learning_rate": 2.9031e-05, 
"loss": 0.0069, "step": 12910 }, { "epoch": 23.82179132040628, "grad_norm": 0.3259732127189636, "learning_rate": 2.903066666666667e-05, "loss": 0.0063, "step": 12911 }, { "epoch": 23.823638042474606, "grad_norm": 0.1458972543478012, "learning_rate": 2.9030333333333332e-05, "loss": 0.0044, "step": 12912 }, { "epoch": 23.825484764542935, "grad_norm": 0.16559676826000214, "learning_rate": 2.903e-05, "loss": 0.0051, "step": 12913 }, { "epoch": 23.827331486611264, "grad_norm": 0.3490874171257019, "learning_rate": 2.9029666666666668e-05, "loss": 0.0059, "step": 12914 }, { "epoch": 23.829178208679593, "grad_norm": 0.5214719772338867, "learning_rate": 2.9029333333333333e-05, "loss": 0.0076, "step": 12915 }, { "epoch": 23.831024930747922, "grad_norm": 0.2873786985874176, "learning_rate": 2.9029000000000003e-05, "loss": 0.0113, "step": 12916 }, { "epoch": 23.83287165281625, "grad_norm": 0.7002087235450745, "learning_rate": 2.902866666666667e-05, "loss": 0.151, "step": 12917 }, { "epoch": 23.83471837488458, "grad_norm": 0.48249414563179016, "learning_rate": 2.9028333333333335e-05, "loss": 0.108, "step": 12918 }, { "epoch": 23.83656509695291, "grad_norm": 0.5469855070114136, "learning_rate": 2.9028e-05, "loss": 0.0875, "step": 12919 }, { "epoch": 23.83841181902124, "grad_norm": 0.3179090917110443, "learning_rate": 2.902766666666667e-05, "loss": 0.0698, "step": 12920 }, { "epoch": 23.840258541089565, "grad_norm": 0.30969929695129395, "learning_rate": 2.9027333333333332e-05, "loss": 0.0512, "step": 12921 }, { "epoch": 23.842105263157894, "grad_norm": 1.0350627899169922, "learning_rate": 2.9027000000000002e-05, "loss": 0.0404, "step": 12922 }, { "epoch": 23.843951985226223, "grad_norm": 0.5427265763282776, "learning_rate": 2.9026666666666664e-05, "loss": 0.0655, "step": 12923 }, { "epoch": 23.845798707294552, "grad_norm": 0.48509711027145386, "learning_rate": 2.9026333333333334e-05, "loss": 0.048, "step": 12924 }, { "epoch": 23.84764542936288, "grad_norm": 0.5452003479003906, 
"learning_rate": 2.9026000000000003e-05, "loss": 0.0208, "step": 12925 }, { "epoch": 23.84949215143121, "grad_norm": 0.361055850982666, "learning_rate": 2.9025666666666666e-05, "loss": 0.0243, "step": 12926 }, { "epoch": 23.85133887349954, "grad_norm": 0.3371514081954956, "learning_rate": 2.9025333333333335e-05, "loss": 0.0268, "step": 12927 }, { "epoch": 23.85318559556787, "grad_norm": 0.23774278163909912, "learning_rate": 2.9025e-05, "loss": 0.0188, "step": 12928 }, { "epoch": 23.855032317636194, "grad_norm": 0.364484041929245, "learning_rate": 2.9024666666666667e-05, "loss": 0.0227, "step": 12929 }, { "epoch": 23.856879039704523, "grad_norm": 0.42037510871887207, "learning_rate": 2.9024333333333333e-05, "loss": 0.0112, "step": 12930 }, { "epoch": 23.858725761772853, "grad_norm": 0.29534637928009033, "learning_rate": 2.9024000000000002e-05, "loss": 0.0173, "step": 12931 }, { "epoch": 23.86057248384118, "grad_norm": 3.09122896194458, "learning_rate": 2.9023666666666665e-05, "loss": 0.0062, "step": 12932 }, { "epoch": 23.86241920590951, "grad_norm": 0.28615516424179077, "learning_rate": 2.9023333333333334e-05, "loss": 0.0058, "step": 12933 }, { "epoch": 23.86426592797784, "grad_norm": 1.4320789575576782, "learning_rate": 2.9023000000000003e-05, "loss": 0.031, "step": 12934 }, { "epoch": 23.86611265004617, "grad_norm": 0.24899813532829285, "learning_rate": 2.9022666666666666e-05, "loss": 0.0061, "step": 12935 }, { "epoch": 23.8679593721145, "grad_norm": 0.1240304559469223, "learning_rate": 2.9022333333333335e-05, "loss": 0.0036, "step": 12936 }, { "epoch": 23.869806094182824, "grad_norm": 0.25697949528694153, "learning_rate": 2.9022e-05, "loss": 0.0068, "step": 12937 }, { "epoch": 23.871652816251153, "grad_norm": 0.23376378417015076, "learning_rate": 2.9021666666666667e-05, "loss": 0.0072, "step": 12938 }, { "epoch": 23.873499538319482, "grad_norm": 0.1371239274740219, "learning_rate": 2.9021333333333333e-05, "loss": 0.0055, "step": 12939 }, { "epoch": 
23.87534626038781, "grad_norm": 0.3965795636177063, "learning_rate": 2.9021000000000002e-05, "loss": 0.0067, "step": 12940 }, { "epoch": 23.87719298245614, "grad_norm": 0.2709249258041382, "learning_rate": 2.9020666666666668e-05, "loss": 0.0106, "step": 12941 }, { "epoch": 23.87903970452447, "grad_norm": 0.5115156769752502, "learning_rate": 2.9020333333333334e-05, "loss": 0.0115, "step": 12942 }, { "epoch": 23.8808864265928, "grad_norm": 0.2460068166255951, "learning_rate": 2.9020000000000003e-05, "loss": 0.0069, "step": 12943 }, { "epoch": 23.882733148661128, "grad_norm": 0.14581836760044098, "learning_rate": 2.9019666666666666e-05, "loss": 0.0189, "step": 12944 }, { "epoch": 23.884579870729453, "grad_norm": 0.2299008071422577, "learning_rate": 2.9019333333333335e-05, "loss": 0.0056, "step": 12945 }, { "epoch": 23.886426592797783, "grad_norm": 0.15325182676315308, "learning_rate": 2.9019e-05, "loss": 0.0044, "step": 12946 }, { "epoch": 23.88827331486611, "grad_norm": 0.24629101157188416, "learning_rate": 2.9018666666666667e-05, "loss": 0.0108, "step": 12947 }, { "epoch": 23.89012003693444, "grad_norm": 0.25775256752967834, "learning_rate": 2.9018333333333333e-05, "loss": 0.0145, "step": 12948 }, { "epoch": 23.89196675900277, "grad_norm": 0.20524519681930542, "learning_rate": 2.9018000000000002e-05, "loss": 0.0046, "step": 12949 }, { "epoch": 23.8938134810711, "grad_norm": 0.21955180168151855, "learning_rate": 2.9017666666666668e-05, "loss": 0.0076, "step": 12950 }, { "epoch": 23.89566020313943, "grad_norm": 0.44401729106903076, "learning_rate": 2.9017333333333334e-05, "loss": 0.0039, "step": 12951 }, { "epoch": 23.897506925207757, "grad_norm": 0.29561930894851685, "learning_rate": 2.9017e-05, "loss": 0.0056, "step": 12952 }, { "epoch": 23.899353647276087, "grad_norm": 0.17952415347099304, "learning_rate": 2.9016666666666666e-05, "loss": 0.0042, "step": 12953 }, { "epoch": 23.901200369344412, "grad_norm": 0.41808587312698364, "learning_rate": 
2.9016333333333335e-05, "loss": 0.0036, "step": 12954 }, { "epoch": 23.90304709141274, "grad_norm": 0.23725056648254395, "learning_rate": 2.9015999999999998e-05, "loss": 0.0055, "step": 12955 }, { "epoch": 23.90489381348107, "grad_norm": 0.372693806886673, "learning_rate": 2.9015666666666667e-05, "loss": 0.0084, "step": 12956 }, { "epoch": 23.9067405355494, "grad_norm": 0.19695430994033813, "learning_rate": 2.9015333333333333e-05, "loss": 0.007, "step": 12957 }, { "epoch": 23.90858725761773, "grad_norm": 0.13928771018981934, "learning_rate": 2.9015e-05, "loss": 0.0032, "step": 12958 }, { "epoch": 23.910433979686058, "grad_norm": 0.43361949920654297, "learning_rate": 2.901466666666667e-05, "loss": 0.0104, "step": 12959 }, { "epoch": 23.912280701754387, "grad_norm": 0.1476796269416809, "learning_rate": 2.9014333333333334e-05, "loss": 0.0049, "step": 12960 }, { "epoch": 23.914127423822716, "grad_norm": 0.2999615967273712, "learning_rate": 2.9014e-05, "loss": 0.0067, "step": 12961 }, { "epoch": 23.91597414589104, "grad_norm": 0.7123618125915527, "learning_rate": 2.9013666666666666e-05, "loss": 0.0119, "step": 12962 }, { "epoch": 23.91782086795937, "grad_norm": 0.18582561612129211, "learning_rate": 2.9013333333333336e-05, "loss": 0.0033, "step": 12963 }, { "epoch": 23.9196675900277, "grad_norm": 0.5258939862251282, "learning_rate": 2.9012999999999998e-05, "loss": 0.0071, "step": 12964 }, { "epoch": 23.92151431209603, "grad_norm": 0.6415529847145081, "learning_rate": 2.9012666666666668e-05, "loss": 0.0128, "step": 12965 }, { "epoch": 23.92336103416436, "grad_norm": 0.29516181349754333, "learning_rate": 2.9012333333333337e-05, "loss": 0.0132, "step": 12966 }, { "epoch": 23.925207756232687, "grad_norm": 0.7700099349021912, "learning_rate": 2.9012e-05, "loss": 0.1746, "step": 12967 }, { "epoch": 23.927054478301017, "grad_norm": 0.46317270398139954, "learning_rate": 2.901166666666667e-05, "loss": 0.1412, "step": 12968 }, { "epoch": 23.928901200369346, "grad_norm": 
0.6192085146903992, "learning_rate": 2.9011333333333335e-05, "loss": 0.1088, "step": 12969 }, { "epoch": 23.930747922437675, "grad_norm": 0.6775713562965393, "learning_rate": 2.9011e-05, "loss": 0.0943, "step": 12970 }, { "epoch": 23.932594644506, "grad_norm": 0.6361328959465027, "learning_rate": 2.9010666666666667e-05, "loss": 0.0614, "step": 12971 }, { "epoch": 23.93444136657433, "grad_norm": 0.6036672592163086, "learning_rate": 2.9010333333333336e-05, "loss": 0.0678, "step": 12972 }, { "epoch": 23.93628808864266, "grad_norm": 0.37628716230392456, "learning_rate": 2.901e-05, "loss": 0.0702, "step": 12973 }, { "epoch": 23.938134810710988, "grad_norm": 0.34638822078704834, "learning_rate": 2.9009666666666668e-05, "loss": 0.0843, "step": 12974 }, { "epoch": 23.939981532779317, "grad_norm": 0.4206011891365051, "learning_rate": 2.9009333333333337e-05, "loss": 0.034, "step": 12975 }, { "epoch": 23.941828254847646, "grad_norm": 0.4541192352771759, "learning_rate": 2.9009e-05, "loss": 0.0732, "step": 12976 }, { "epoch": 23.943674976915975, "grad_norm": 0.3251061737537384, "learning_rate": 2.900866666666667e-05, "loss": 0.0167, "step": 12977 }, { "epoch": 23.945521698984304, "grad_norm": 0.4052632451057434, "learning_rate": 2.9008333333333335e-05, "loss": 0.0125, "step": 12978 }, { "epoch": 23.94736842105263, "grad_norm": 0.20792420208454132, "learning_rate": 2.9008e-05, "loss": 0.0106, "step": 12979 }, { "epoch": 23.94921514312096, "grad_norm": 0.2851109206676483, "learning_rate": 2.9007666666666667e-05, "loss": 0.0157, "step": 12980 }, { "epoch": 23.95106186518929, "grad_norm": 0.4680311381816864, "learning_rate": 2.9007333333333336e-05, "loss": 0.0161, "step": 12981 }, { "epoch": 23.952908587257618, "grad_norm": 0.1415628045797348, "learning_rate": 2.9007e-05, "loss": 0.0334, "step": 12982 }, { "epoch": 23.954755309325947, "grad_norm": 0.20743387937545776, "learning_rate": 2.9006666666666668e-05, "loss": 0.0088, "step": 12983 }, { "epoch": 23.956602031394276, 
"grad_norm": 0.20807644724845886, "learning_rate": 2.9006333333333334e-05, "loss": 0.0204, "step": 12984 }, { "epoch": 23.958448753462605, "grad_norm": 0.2604505121707916, "learning_rate": 2.9006e-05, "loss": 0.0062, "step": 12985 }, { "epoch": 23.960295475530934, "grad_norm": 0.22881126403808594, "learning_rate": 2.900566666666667e-05, "loss": 0.0053, "step": 12986 }, { "epoch": 23.96214219759926, "grad_norm": 0.11627691984176636, "learning_rate": 2.900533333333333e-05, "loss": 0.0041, "step": 12987 }, { "epoch": 23.96398891966759, "grad_norm": 0.24788479506969452, "learning_rate": 2.9005e-05, "loss": 0.0072, "step": 12988 }, { "epoch": 23.965835641735918, "grad_norm": 0.27122530341148376, "learning_rate": 2.9004666666666667e-05, "loss": 0.006, "step": 12989 }, { "epoch": 23.967682363804247, "grad_norm": 0.2032022923231125, "learning_rate": 2.9004333333333333e-05, "loss": 0.0043, "step": 12990 }, { "epoch": 23.969529085872576, "grad_norm": 0.31291407346725464, "learning_rate": 2.9004000000000002e-05, "loss": 0.004, "step": 12991 }, { "epoch": 23.971375807940905, "grad_norm": 0.38916656374931335, "learning_rate": 2.9003666666666668e-05, "loss": 0.0113, "step": 12992 }, { "epoch": 23.973222530009235, "grad_norm": 0.2948424518108368, "learning_rate": 2.9003333333333334e-05, "loss": 0.0118, "step": 12993 }, { "epoch": 23.975069252077564, "grad_norm": 0.223440483212471, "learning_rate": 2.9003e-05, "loss": 0.0082, "step": 12994 }, { "epoch": 23.97691597414589, "grad_norm": 0.4852774739265442, "learning_rate": 2.900266666666667e-05, "loss": 0.0103, "step": 12995 }, { "epoch": 23.97876269621422, "grad_norm": 0.2066236287355423, "learning_rate": 2.9002333333333332e-05, "loss": 0.0038, "step": 12996 }, { "epoch": 23.980609418282548, "grad_norm": 0.32647451758384705, "learning_rate": 2.9002e-05, "loss": 0.0078, "step": 12997 }, { "epoch": 23.982456140350877, "grad_norm": 0.21102245151996613, "learning_rate": 2.9001666666666667e-05, "loss": 0.005, "step": 12998 }, { "epoch": 
23.984302862419206, "grad_norm": 0.4090909957885742, "learning_rate": 2.9001333333333333e-05, "loss": 0.0124, "step": 12999 }, { "epoch": 23.986149584487535, "grad_norm": 0.1885944902896881, "learning_rate": 2.9001000000000002e-05, "loss": 0.0041, "step": 13000 }, { "epoch": 23.986149584487535, "eval_cer": 0.11374653290820956, "eval_loss": 0.38288652896881104, "eval_runtime": 15.7412, "eval_samples_per_second": 19.312, "eval_steps_per_second": 0.635, "eval_wer": 0.3898695318495779, "step": 13000 }, { "epoch": 23.987996306555864, "grad_norm": 0.45336973667144775, "learning_rate": 2.9000666666666668e-05, "loss": 0.0112, "step": 13001 }, { "epoch": 23.989843028624193, "grad_norm": 0.5542886257171631, "learning_rate": 2.9000333333333334e-05, "loss": 0.0076, "step": 13002 }, { "epoch": 23.991689750692522, "grad_norm": 0.18405455350875854, "learning_rate": 2.9e-05, "loss": 0.0044, "step": 13003 }, { "epoch": 23.993536472760848, "grad_norm": 0.2096349596977234, "learning_rate": 2.899966666666667e-05, "loss": 0.006, "step": 13004 }, { "epoch": 23.995383194829177, "grad_norm": 0.2750890851020813, "learning_rate": 2.8999333333333332e-05, "loss": 0.0053, "step": 13005 }, { "epoch": 23.997229916897506, "grad_norm": 0.24388429522514343, "learning_rate": 2.8999e-05, "loss": 0.0051, "step": 13006 }, { "epoch": 23.999076638965835, "grad_norm": 0.6541364789009094, "learning_rate": 2.8998666666666667e-05, "loss": 0.0086, "step": 13007 }, { "epoch": 24.0, "grad_norm": 0.0675673708319664, "learning_rate": 2.8998333333333333e-05, "loss": 0.0008, "step": 13008 }, { "epoch": 24.00184672206833, "grad_norm": 0.7535595893859863, "learning_rate": 2.8998000000000003e-05, "loss": 0.1244, "step": 13009 }, { "epoch": 24.00369344413666, "grad_norm": 0.40268534421920776, "learning_rate": 2.899766666666667e-05, "loss": 0.1111, "step": 13010 }, { "epoch": 24.005540166204987, "grad_norm": 0.49000465869903564, "learning_rate": 2.8997333333333334e-05, "loss": 0.0655, "step": 13011 }, { "epoch": 
24.007386888273317, "grad_norm": 0.6041421890258789, "learning_rate": 2.8997e-05, "loss": 0.052, "step": 13012 }, { "epoch": 24.009233610341642, "grad_norm": 0.37183427810668945, "learning_rate": 2.8996666666666666e-05, "loss": 0.0523, "step": 13013 }, { "epoch": 24.01108033240997, "grad_norm": 0.5852181315422058, "learning_rate": 2.8996333333333332e-05, "loss": 0.0852, "step": 13014 }, { "epoch": 24.0129270544783, "grad_norm": 0.32598167657852173, "learning_rate": 2.8996e-05, "loss": 0.0321, "step": 13015 }, { "epoch": 24.01477377654663, "grad_norm": 0.342527836561203, "learning_rate": 2.8995666666666667e-05, "loss": 0.0431, "step": 13016 }, { "epoch": 24.01662049861496, "grad_norm": 0.753602147102356, "learning_rate": 2.8995333333333333e-05, "loss": 0.0439, "step": 13017 }, { "epoch": 24.018467220683288, "grad_norm": 0.46627503633499146, "learning_rate": 2.8995000000000003e-05, "loss": 0.0432, "step": 13018 }, { "epoch": 24.020313942751617, "grad_norm": 0.37367913126945496, "learning_rate": 2.8994666666666665e-05, "loss": 0.0298, "step": 13019 }, { "epoch": 24.022160664819946, "grad_norm": 0.5896576046943665, "learning_rate": 2.8994333333333335e-05, "loss": 0.0205, "step": 13020 }, { "epoch": 24.02400738688827, "grad_norm": 0.46239790320396423, "learning_rate": 2.8994e-05, "loss": 0.0388, "step": 13021 }, { "epoch": 24.0258541089566, "grad_norm": 0.25117456912994385, "learning_rate": 2.8993666666666667e-05, "loss": 0.0259, "step": 13022 }, { "epoch": 24.02770083102493, "grad_norm": 0.20962220430374146, "learning_rate": 2.8993333333333332e-05, "loss": 0.0174, "step": 13023 }, { "epoch": 24.02954755309326, "grad_norm": 0.3992897570133209, "learning_rate": 2.8993000000000002e-05, "loss": 0.0439, "step": 13024 }, { "epoch": 24.03139427516159, "grad_norm": 0.14528046548366547, "learning_rate": 2.8992666666666668e-05, "loss": 0.0058, "step": 13025 }, { "epoch": 24.033240997229917, "grad_norm": 0.2308790236711502, "learning_rate": 2.8992333333333334e-05, "loss": 0.0047, 
"step": 13026 }, { "epoch": 24.035087719298247, "grad_norm": 0.12781424820423126, "learning_rate": 2.8992000000000003e-05, "loss": 0.0049, "step": 13027 }, { "epoch": 24.036934441366576, "grad_norm": 0.2648352384567261, "learning_rate": 2.8991666666666666e-05, "loss": 0.0061, "step": 13028 }, { "epoch": 24.0387811634349, "grad_norm": 0.16820089519023895, "learning_rate": 2.8991333333333335e-05, "loss": 0.0049, "step": 13029 }, { "epoch": 24.04062788550323, "grad_norm": 0.37286460399627686, "learning_rate": 2.8991e-05, "loss": 0.0055, "step": 13030 }, { "epoch": 24.04247460757156, "grad_norm": 0.3106335699558258, "learning_rate": 2.8990666666666667e-05, "loss": 0.0088, "step": 13031 }, { "epoch": 24.04432132963989, "grad_norm": 0.27128398418426514, "learning_rate": 2.8990333333333333e-05, "loss": 0.005, "step": 13032 }, { "epoch": 24.046168051708218, "grad_norm": 0.29108282923698425, "learning_rate": 2.8990000000000002e-05, "loss": 0.0094, "step": 13033 }, { "epoch": 24.048014773776547, "grad_norm": 0.2468157261610031, "learning_rate": 2.8989666666666668e-05, "loss": 0.006, "step": 13034 }, { "epoch": 24.049861495844876, "grad_norm": 0.28584250807762146, "learning_rate": 2.8989333333333334e-05, "loss": 0.0074, "step": 13035 }, { "epoch": 24.051708217913205, "grad_norm": 0.2079189568758011, "learning_rate": 2.8989000000000003e-05, "loss": 0.0043, "step": 13036 }, { "epoch": 24.053554939981534, "grad_norm": 0.4608403146266937, "learning_rate": 2.8988666666666666e-05, "loss": 0.0071, "step": 13037 }, { "epoch": 24.05540166204986, "grad_norm": 0.09350863099098206, "learning_rate": 2.8988333333333335e-05, "loss": 0.0022, "step": 13038 }, { "epoch": 24.05724838411819, "grad_norm": 0.48382362723350525, "learning_rate": 2.8988e-05, "loss": 0.0173, "step": 13039 }, { "epoch": 24.05909510618652, "grad_norm": 0.44276735186576843, "learning_rate": 2.8987666666666667e-05, "loss": 0.0077, "step": 13040 }, { "epoch": 24.060941828254848, "grad_norm": 0.3569282591342926, 
"learning_rate": 2.8987333333333336e-05, "loss": 0.0081, "step": 13041 }, { "epoch": 24.062788550323177, "grad_norm": 0.11440275609493256, "learning_rate": 2.8987000000000002e-05, "loss": 0.0015, "step": 13042 }, { "epoch": 24.064635272391506, "grad_norm": 0.11149106919765472, "learning_rate": 2.8986666666666668e-05, "loss": 0.0027, "step": 13043 }, { "epoch": 24.066481994459835, "grad_norm": 0.576856255531311, "learning_rate": 2.8986333333333334e-05, "loss": 0.0067, "step": 13044 }, { "epoch": 24.068328716528164, "grad_norm": 0.07895402610301971, "learning_rate": 2.8986e-05, "loss": 0.0019, "step": 13045 }, { "epoch": 24.07017543859649, "grad_norm": 0.15396630764007568, "learning_rate": 2.8985666666666666e-05, "loss": 0.0052, "step": 13046 }, { "epoch": 24.07202216066482, "grad_norm": 0.1727500557899475, "learning_rate": 2.8985333333333335e-05, "loss": 0.0033, "step": 13047 }, { "epoch": 24.073868882733148, "grad_norm": 1.133514165878296, "learning_rate": 2.8984999999999998e-05, "loss": 0.0144, "step": 13048 }, { "epoch": 24.075715604801477, "grad_norm": 0.7800816297531128, "learning_rate": 2.8984666666666667e-05, "loss": 0.0094, "step": 13049 }, { "epoch": 24.077562326869806, "grad_norm": 0.13474318385124207, "learning_rate": 2.8984333333333336e-05, "loss": 0.0057, "step": 13050 }, { "epoch": 24.079409048938135, "grad_norm": 0.20290903747081757, "learning_rate": 2.8984e-05, "loss": 0.0098, "step": 13051 }, { "epoch": 24.081255771006465, "grad_norm": 0.22160065174102783, "learning_rate": 2.8983666666666668e-05, "loss": 0.0046, "step": 13052 }, { "epoch": 24.083102493074794, "grad_norm": 0.5779085755348206, "learning_rate": 2.8983333333333334e-05, "loss": 0.0111, "step": 13053 }, { "epoch": 24.08494921514312, "grad_norm": 0.39063599705696106, "learning_rate": 2.8983e-05, "loss": 0.0076, "step": 13054 }, { "epoch": 24.08679593721145, "grad_norm": 0.34382039308547974, "learning_rate": 2.8982666666666666e-05, "loss": 0.008, "step": 13055 }, { "epoch": 
24.088642659279778, "grad_norm": 0.10230110585689545, "learning_rate": 2.8982333333333335e-05, "loss": 0.0027, "step": 13056 }, { "epoch": 24.090489381348107, "grad_norm": 0.6774642467498779, "learning_rate": 2.8981999999999998e-05, "loss": 0.0165, "step": 13057 }, { "epoch": 24.092336103416436, "grad_norm": 0.2824467122554779, "learning_rate": 2.8981666666666667e-05, "loss": 0.0098, "step": 13058 }, { "epoch": 24.094182825484765, "grad_norm": 0.5672895908355713, "learning_rate": 2.8981333333333337e-05, "loss": 0.1176, "step": 13059 }, { "epoch": 24.096029547553094, "grad_norm": 0.3753058910369873, "learning_rate": 2.8981e-05, "loss": 0.1258, "step": 13060 }, { "epoch": 24.097876269621423, "grad_norm": 0.5236627459526062, "learning_rate": 2.898066666666667e-05, "loss": 0.0992, "step": 13061 }, { "epoch": 24.099722991689752, "grad_norm": 0.46382221579551697, "learning_rate": 2.8980333333333334e-05, "loss": 0.0433, "step": 13062 }, { "epoch": 24.101569713758078, "grad_norm": 0.3146475553512573, "learning_rate": 2.898e-05, "loss": 0.0483, "step": 13063 }, { "epoch": 24.103416435826407, "grad_norm": 0.4047550857067108, "learning_rate": 2.8979666666666666e-05, "loss": 0.0892, "step": 13064 }, { "epoch": 24.105263157894736, "grad_norm": 0.38300085067749023, "learning_rate": 2.8979333333333336e-05, "loss": 0.0531, "step": 13065 }, { "epoch": 24.107109879963065, "grad_norm": 0.4566096067428589, "learning_rate": 2.8979e-05, "loss": 0.0476, "step": 13066 }, { "epoch": 24.108956602031395, "grad_norm": 0.36871248483657837, "learning_rate": 2.8978666666666667e-05, "loss": 0.0597, "step": 13067 }, { "epoch": 24.110803324099724, "grad_norm": 0.42504021525382996, "learning_rate": 2.8978333333333337e-05, "loss": 0.0416, "step": 13068 }, { "epoch": 24.112650046168053, "grad_norm": 0.744490385055542, "learning_rate": 2.8978e-05, "loss": 0.0171, "step": 13069 }, { "epoch": 24.114496768236382, "grad_norm": 0.32120198011398315, "learning_rate": 2.897766666666667e-05, "loss": 0.0374, 
"step": 13070 }, { "epoch": 24.116343490304708, "grad_norm": 0.40598228573799133, "learning_rate": 2.8977333333333335e-05, "loss": 0.0406, "step": 13071 }, { "epoch": 24.118190212373037, "grad_norm": 0.44137951731681824, "learning_rate": 2.8977e-05, "loss": 0.0151, "step": 13072 }, { "epoch": 24.120036934441366, "grad_norm": 0.1768002212047577, "learning_rate": 2.8976666666666666e-05, "loss": 0.0071, "step": 13073 }, { "epoch": 24.121883656509695, "grad_norm": 0.13510826230049133, "learning_rate": 2.8976333333333332e-05, "loss": 0.0054, "step": 13074 }, { "epoch": 24.123730378578024, "grad_norm": 0.21818716824054718, "learning_rate": 2.8976000000000002e-05, "loss": 0.0068, "step": 13075 }, { "epoch": 24.125577100646353, "grad_norm": 0.16706795990467072, "learning_rate": 2.8975666666666668e-05, "loss": 0.0062, "step": 13076 }, { "epoch": 24.127423822714682, "grad_norm": 0.31920814514160156, "learning_rate": 2.8975333333333334e-05, "loss": 0.0098, "step": 13077 }, { "epoch": 24.12927054478301, "grad_norm": 0.2236560881137848, "learning_rate": 2.8975e-05, "loss": 0.0141, "step": 13078 }, { "epoch": 24.131117266851337, "grad_norm": 0.1601002961397171, "learning_rate": 2.897466666666667e-05, "loss": 0.005, "step": 13079 }, { "epoch": 24.132963988919666, "grad_norm": 0.13643929362297058, "learning_rate": 2.897433333333333e-05, "loss": 0.0048, "step": 13080 }, { "epoch": 24.134810710987995, "grad_norm": 0.24548067152500153, "learning_rate": 2.8974e-05, "loss": 0.0048, "step": 13081 }, { "epoch": 24.136657433056325, "grad_norm": 0.3406643569469452, "learning_rate": 2.8973666666666667e-05, "loss": 0.0129, "step": 13082 }, { "epoch": 24.138504155124654, "grad_norm": 0.362541526556015, "learning_rate": 2.8973333333333333e-05, "loss": 0.0058, "step": 13083 }, { "epoch": 24.140350877192983, "grad_norm": 0.21052448451519012, "learning_rate": 2.8973000000000002e-05, "loss": 0.0081, "step": 13084 }, { "epoch": 24.142197599261312, "grad_norm": 0.3793521523475647, "learning_rate": 
2.8972666666666668e-05, "loss": 0.0037, "step": 13085 }, { "epoch": 24.14404432132964, "grad_norm": 0.11015784740447998, "learning_rate": 2.8972333333333334e-05, "loss": 0.0026, "step": 13086 }, { "epoch": 24.14589104339797, "grad_norm": 0.08412471413612366, "learning_rate": 2.8972e-05, "loss": 0.0023, "step": 13087 }, { "epoch": 24.147737765466296, "grad_norm": 0.2851986587047577, "learning_rate": 2.897166666666667e-05, "loss": 0.0031, "step": 13088 }, { "epoch": 24.149584487534625, "grad_norm": 0.1866234540939331, "learning_rate": 2.897133333333333e-05, "loss": 0.0027, "step": 13089 }, { "epoch": 24.151431209602954, "grad_norm": 0.10915181040763855, "learning_rate": 2.8971e-05, "loss": 0.0021, "step": 13090 }, { "epoch": 24.153277931671283, "grad_norm": 0.3201306164264679, "learning_rate": 2.897066666666667e-05, "loss": 0.0069, "step": 13091 }, { "epoch": 24.155124653739612, "grad_norm": 0.538478672504425, "learning_rate": 2.8970333333333333e-05, "loss": 0.0042, "step": 13092 }, { "epoch": 24.15697137580794, "grad_norm": 0.12584218382835388, "learning_rate": 2.8970000000000002e-05, "loss": 0.0036, "step": 13093 }, { "epoch": 24.15881809787627, "grad_norm": 0.2512202262878418, "learning_rate": 2.8969666666666668e-05, "loss": 0.0068, "step": 13094 }, { "epoch": 24.1606648199446, "grad_norm": 0.3698013424873352, "learning_rate": 2.8969333333333334e-05, "loss": 0.0074, "step": 13095 }, { "epoch": 24.162511542012926, "grad_norm": 0.13004982471466064, "learning_rate": 2.8969e-05, "loss": 0.0025, "step": 13096 }, { "epoch": 24.164358264081255, "grad_norm": 0.44549521803855896, "learning_rate": 2.896866666666667e-05, "loss": 0.0107, "step": 13097 }, { "epoch": 24.166204986149584, "grad_norm": 0.33616870641708374, "learning_rate": 2.8968333333333332e-05, "loss": 0.0087, "step": 13098 }, { "epoch": 24.168051708217913, "grad_norm": 0.114255391061306, "learning_rate": 2.8968e-05, "loss": 0.0017, "step": 13099 }, { "epoch": 24.169898430286242, "grad_norm": 0.8930944204330444, 
"learning_rate": 2.896766666666667e-05, "loss": 0.0139, "step": 13100 }, { "epoch": 24.17174515235457, "grad_norm": 0.6546629071235657, "learning_rate": 2.8967333333333333e-05, "loss": 0.0122, "step": 13101 }, { "epoch": 24.1735918744229, "grad_norm": 0.4253278076648712, "learning_rate": 2.8967000000000002e-05, "loss": 0.0088, "step": 13102 }, { "epoch": 24.17543859649123, "grad_norm": 0.2592533528804779, "learning_rate": 2.8966666666666665e-05, "loss": 0.0072, "step": 13103 }, { "epoch": 24.177285318559555, "grad_norm": 0.24567706882953644, "learning_rate": 2.8966333333333334e-05, "loss": 0.0038, "step": 13104 }, { "epoch": 24.179132040627884, "grad_norm": 0.21996194124221802, "learning_rate": 2.8966e-05, "loss": 0.0035, "step": 13105 }, { "epoch": 24.180978762696213, "grad_norm": 0.5467721223831177, "learning_rate": 2.8965666666666666e-05, "loss": 0.0069, "step": 13106 }, { "epoch": 24.182825484764543, "grad_norm": 0.19381636381149292, "learning_rate": 2.8965333333333332e-05, "loss": 0.0048, "step": 13107 }, { "epoch": 24.18467220683287, "grad_norm": 0.15538102388381958, "learning_rate": 2.8965e-05, "loss": 0.0124, "step": 13108 }, { "epoch": 24.1865189289012, "grad_norm": 0.6547185182571411, "learning_rate": 2.8964666666666667e-05, "loss": 0.1145, "step": 13109 }, { "epoch": 24.18836565096953, "grad_norm": 0.612529456615448, "learning_rate": 2.8964333333333333e-05, "loss": 0.1432, "step": 13110 }, { "epoch": 24.19021237303786, "grad_norm": 0.5609632730484009, "learning_rate": 2.8964000000000003e-05, "loss": 0.1157, "step": 13111 }, { "epoch": 24.19205909510619, "grad_norm": 0.3962590992450714, "learning_rate": 2.8963666666666665e-05, "loss": 0.1006, "step": 13112 }, { "epoch": 24.193905817174514, "grad_norm": 0.3930540382862091, "learning_rate": 2.8963333333333334e-05, "loss": 0.041, "step": 13113 }, { "epoch": 24.195752539242843, "grad_norm": 0.5370532870292664, "learning_rate": 2.8963e-05, "loss": 0.0613, "step": 13114 }, { "epoch": 24.197599261311172, 
"grad_norm": 0.5153535008430481, "learning_rate": 2.8962666666666666e-05, "loss": 0.0355, "step": 13115 }, { "epoch": 24.1994459833795, "grad_norm": 0.4103752374649048, "learning_rate": 2.8962333333333336e-05, "loss": 0.049, "step": 13116 }, { "epoch": 24.20129270544783, "grad_norm": 0.35563552379608154, "learning_rate": 2.8962e-05, "loss": 0.0612, "step": 13117 }, { "epoch": 24.20313942751616, "grad_norm": 0.3967292606830597, "learning_rate": 2.8961666666666667e-05, "loss": 0.0478, "step": 13118 }, { "epoch": 24.20498614958449, "grad_norm": 0.42570221424102783, "learning_rate": 2.8961333333333333e-05, "loss": 0.0329, "step": 13119 }, { "epoch": 24.206832871652818, "grad_norm": 0.34922346472740173, "learning_rate": 2.8961000000000003e-05, "loss": 0.0428, "step": 13120 }, { "epoch": 24.208679593721143, "grad_norm": 0.46752482652664185, "learning_rate": 2.8960666666666665e-05, "loss": 0.0234, "step": 13121 }, { "epoch": 24.210526315789473, "grad_norm": 0.7021492719650269, "learning_rate": 2.8960333333333335e-05, "loss": 0.015, "step": 13122 }, { "epoch": 24.2123730378578, "grad_norm": 0.28904110193252563, "learning_rate": 2.896e-05, "loss": 0.0157, "step": 13123 }, { "epoch": 24.21421975992613, "grad_norm": 0.36432945728302, "learning_rate": 2.8959666666666666e-05, "loss": 0.0299, "step": 13124 }, { "epoch": 24.21606648199446, "grad_norm": 0.26848480105400085, "learning_rate": 2.8959333333333336e-05, "loss": 0.009, "step": 13125 }, { "epoch": 24.21791320406279, "grad_norm": 0.19157792627811432, "learning_rate": 2.8959000000000002e-05, "loss": 0.0183, "step": 13126 }, { "epoch": 24.21975992613112, "grad_norm": 0.35590115189552307, "learning_rate": 2.8958666666666668e-05, "loss": 0.009, "step": 13127 }, { "epoch": 24.221606648199447, "grad_norm": 0.46946394443511963, "learning_rate": 2.8958333333333334e-05, "loss": 0.0125, "step": 13128 }, { "epoch": 24.223453370267773, "grad_norm": 0.3206366300582886, "learning_rate": 2.8958000000000003e-05, "loss": 0.0122, "step": 
13129 }, { "epoch": 24.225300092336102, "grad_norm": 0.19763174653053284, "learning_rate": 2.8957666666666665e-05, "loss": 0.0051, "step": 13130 }, { "epoch": 24.22714681440443, "grad_norm": 0.18738523125648499, "learning_rate": 2.8957333333333335e-05, "loss": 0.0066, "step": 13131 }, { "epoch": 24.22899353647276, "grad_norm": 0.554878830909729, "learning_rate": 2.8957e-05, "loss": 0.0073, "step": 13132 }, { "epoch": 24.23084025854109, "grad_norm": 0.3369830548763275, "learning_rate": 2.8956666666666667e-05, "loss": 0.0064, "step": 13133 }, { "epoch": 24.23268698060942, "grad_norm": 0.3536060154438019, "learning_rate": 2.8956333333333336e-05, "loss": 0.006, "step": 13134 }, { "epoch": 24.234533702677748, "grad_norm": 0.30617913603782654, "learning_rate": 2.8956e-05, "loss": 0.0182, "step": 13135 }, { "epoch": 24.236380424746077, "grad_norm": 0.4099351465702057, "learning_rate": 2.8955666666666668e-05, "loss": 0.0025, "step": 13136 }, { "epoch": 24.238227146814406, "grad_norm": 0.2719319760799408, "learning_rate": 2.8955333333333334e-05, "loss": 0.0076, "step": 13137 }, { "epoch": 24.24007386888273, "grad_norm": 0.29288655519485474, "learning_rate": 2.8955e-05, "loss": 0.0071, "step": 13138 }, { "epoch": 24.24192059095106, "grad_norm": 0.36805543303489685, "learning_rate": 2.8954666666666666e-05, "loss": 0.0101, "step": 13139 }, { "epoch": 24.24376731301939, "grad_norm": 0.1612132489681244, "learning_rate": 2.8954333333333335e-05, "loss": 0.0045, "step": 13140 }, { "epoch": 24.24561403508772, "grad_norm": 0.17175914347171783, "learning_rate": 2.8954e-05, "loss": 0.0039, "step": 13141 }, { "epoch": 24.24746075715605, "grad_norm": 0.15694212913513184, "learning_rate": 2.8953666666666667e-05, "loss": 0.0041, "step": 13142 }, { "epoch": 24.249307479224377, "grad_norm": 0.1983942985534668, "learning_rate": 2.8953333333333336e-05, "loss": 0.0032, "step": 13143 }, { "epoch": 24.251154201292707, "grad_norm": 0.42327597737312317, "learning_rate": 2.8953e-05, "loss": 0.0112, 
"step": 13144 }, { "epoch": 24.253000923361036, "grad_norm": 0.29380321502685547, "learning_rate": 2.8952666666666668e-05, "loss": 0.0289, "step": 13145 }, { "epoch": 24.25484764542936, "grad_norm": 0.21177636086940765, "learning_rate": 2.8952333333333334e-05, "loss": 0.005, "step": 13146 }, { "epoch": 24.25669436749769, "grad_norm": 1.7719300985336304, "learning_rate": 2.8952e-05, "loss": 0.0124, "step": 13147 }, { "epoch": 24.25854108956602, "grad_norm": 0.41852688789367676, "learning_rate": 2.8951666666666666e-05, "loss": 0.0088, "step": 13148 }, { "epoch": 24.26038781163435, "grad_norm": 0.1518332064151764, "learning_rate": 2.8951333333333335e-05, "loss": 0.0046, "step": 13149 }, { "epoch": 24.262234533702678, "grad_norm": 0.09099515527486801, "learning_rate": 2.8951e-05, "loss": 0.0017, "step": 13150 }, { "epoch": 24.264081255771007, "grad_norm": 0.1368519365787506, "learning_rate": 2.8950666666666667e-05, "loss": 0.0021, "step": 13151 }, { "epoch": 24.265927977839336, "grad_norm": 0.27972957491874695, "learning_rate": 2.8950333333333336e-05, "loss": 0.0131, "step": 13152 }, { "epoch": 24.267774699907665, "grad_norm": 0.39006727933883667, "learning_rate": 2.895e-05, "loss": 0.0109, "step": 13153 }, { "epoch": 24.26962142197599, "grad_norm": 0.23203179240226746, "learning_rate": 2.8949666666666668e-05, "loss": 0.0073, "step": 13154 }, { "epoch": 24.27146814404432, "grad_norm": 0.43176040053367615, "learning_rate": 2.8949333333333334e-05, "loss": 0.0131, "step": 13155 }, { "epoch": 24.27331486611265, "grad_norm": 0.1430387645959854, "learning_rate": 2.8949e-05, "loss": 0.0032, "step": 13156 }, { "epoch": 24.27516158818098, "grad_norm": 0.32787269353866577, "learning_rate": 2.8948666666666666e-05, "loss": 0.008, "step": 13157 }, { "epoch": 24.277008310249307, "grad_norm": 0.3612595498561859, "learning_rate": 2.8948333333333335e-05, "loss": 0.0124, "step": 13158 }, { "epoch": 24.278855032317637, "grad_norm": 0.8463873863220215, "learning_rate": 2.8948e-05, "loss": 
0.1127, "step": 13159 }, { "epoch": 24.280701754385966, "grad_norm": 0.453067421913147, "learning_rate": 2.8947666666666667e-05, "loss": 0.0955, "step": 13160 }, { "epoch": 24.282548476454295, "grad_norm": 0.4151947796344757, "learning_rate": 2.8947333333333337e-05, "loss": 0.068, "step": 13161 }, { "epoch": 24.284395198522624, "grad_norm": 0.3739778995513916, "learning_rate": 2.8947e-05, "loss": 0.0635, "step": 13162 }, { "epoch": 24.28624192059095, "grad_norm": 0.486072838306427, "learning_rate": 2.894666666666667e-05, "loss": 0.0461, "step": 13163 }, { "epoch": 24.28808864265928, "grad_norm": 0.33261334896087646, "learning_rate": 2.894633333333333e-05, "loss": 0.048, "step": 13164 }, { "epoch": 24.289935364727608, "grad_norm": 0.3986312448978424, "learning_rate": 2.8946e-05, "loss": 0.0442, "step": 13165 }, { "epoch": 24.291782086795937, "grad_norm": 0.5497700572013855, "learning_rate": 2.894566666666667e-05, "loss": 0.0656, "step": 13166 }, { "epoch": 24.293628808864266, "grad_norm": 0.41510388255119324, "learning_rate": 2.8945333333333332e-05, "loss": 0.058, "step": 13167 }, { "epoch": 24.295475530932595, "grad_norm": 0.28671392798423767, "learning_rate": 2.8945e-05, "loss": 0.031, "step": 13168 }, { "epoch": 24.297322253000925, "grad_norm": 0.8106816411018372, "learning_rate": 2.8944666666666667e-05, "loss": 0.0557, "step": 13169 }, { "epoch": 24.299168975069254, "grad_norm": 0.26283910870552063, "learning_rate": 2.8944333333333333e-05, "loss": 0.021, "step": 13170 }, { "epoch": 24.30101569713758, "grad_norm": 0.3150317370891571, "learning_rate": 2.8944e-05, "loss": 0.0208, "step": 13171 }, { "epoch": 24.30286241920591, "grad_norm": 0.448843389749527, "learning_rate": 2.894366666666667e-05, "loss": 0.0123, "step": 13172 }, { "epoch": 24.304709141274238, "grad_norm": 0.19662699103355408, "learning_rate": 2.894333333333333e-05, "loss": 0.0088, "step": 13173 }, { "epoch": 24.306555863342567, "grad_norm": 0.2782691717147827, "learning_rate": 2.8943e-05, "loss": 
0.0099, "step": 13174 }, { "epoch": 24.308402585410896, "grad_norm": 0.12477515637874603, "learning_rate": 2.894266666666667e-05, "loss": 0.0048, "step": 13175 }, { "epoch": 24.310249307479225, "grad_norm": 0.44106438755989075, "learning_rate": 2.8942333333333332e-05, "loss": 0.009, "step": 13176 }, { "epoch": 24.312096029547554, "grad_norm": 0.15457753837108612, "learning_rate": 2.8942000000000002e-05, "loss": 0.0067, "step": 13177 }, { "epoch": 24.313942751615883, "grad_norm": 0.14934605360031128, "learning_rate": 2.8941666666666668e-05, "loss": 0.0059, "step": 13178 }, { "epoch": 24.31578947368421, "grad_norm": 0.15137791633605957, "learning_rate": 2.8941333333333334e-05, "loss": 0.0039, "step": 13179 }, { "epoch": 24.317636195752538, "grad_norm": 0.21593965590000153, "learning_rate": 2.8941e-05, "loss": 0.0085, "step": 13180 }, { "epoch": 24.319482917820867, "grad_norm": 0.2960059344768524, "learning_rate": 2.894066666666667e-05, "loss": 0.0063, "step": 13181 }, { "epoch": 24.321329639889196, "grad_norm": 0.13511642813682556, "learning_rate": 2.894033333333333e-05, "loss": 0.0022, "step": 13182 }, { "epoch": 24.323176361957525, "grad_norm": 0.22784291207790375, "learning_rate": 2.894e-05, "loss": 0.0063, "step": 13183 }, { "epoch": 24.325023084025855, "grad_norm": 0.11898264288902283, "learning_rate": 2.893966666666667e-05, "loss": 0.004, "step": 13184 }, { "epoch": 24.326869806094184, "grad_norm": 0.23218907415866852, "learning_rate": 2.8939333333333333e-05, "loss": 0.0049, "step": 13185 }, { "epoch": 24.328716528162513, "grad_norm": 0.2760145664215088, "learning_rate": 2.8939000000000002e-05, "loss": 0.0036, "step": 13186 }, { "epoch": 24.330563250230842, "grad_norm": 0.22240634262561798, "learning_rate": 2.8938666666666668e-05, "loss": 0.0041, "step": 13187 }, { "epoch": 24.332409972299168, "grad_norm": 0.18136060237884521, "learning_rate": 2.8938333333333334e-05, "loss": 0.0048, "step": 13188 }, { "epoch": 24.334256694367497, "grad_norm": 
0.31389379501342773, "learning_rate": 2.8938e-05, "loss": 0.0049, "step": 13189 }, { "epoch": 24.336103416435826, "grad_norm": 0.2006513774394989, "learning_rate": 2.893766666666667e-05, "loss": 0.0063, "step": 13190 }, { "epoch": 24.337950138504155, "grad_norm": 0.3250141143798828, "learning_rate": 2.8937333333333335e-05, "loss": 0.0057, "step": 13191 }, { "epoch": 24.339796860572484, "grad_norm": 0.1399124562740326, "learning_rate": 2.8937e-05, "loss": 0.0055, "step": 13192 }, { "epoch": 24.341643582640813, "grad_norm": 0.36624839901924133, "learning_rate": 2.893666666666667e-05, "loss": 0.008, "step": 13193 }, { "epoch": 24.343490304709142, "grad_norm": 0.2624996304512024, "learning_rate": 2.8936333333333333e-05, "loss": 0.0142, "step": 13194 }, { "epoch": 24.34533702677747, "grad_norm": 0.16521060466766357, "learning_rate": 2.8936000000000002e-05, "loss": 0.0033, "step": 13195 }, { "epoch": 24.347183748845797, "grad_norm": 0.1808953732252121, "learning_rate": 2.8935666666666665e-05, "loss": 0.0018, "step": 13196 }, { "epoch": 24.349030470914126, "grad_norm": 0.2830214500427246, "learning_rate": 2.8935333333333334e-05, "loss": 0.0082, "step": 13197 }, { "epoch": 24.350877192982455, "grad_norm": 0.8021502494812012, "learning_rate": 2.8935e-05, "loss": 0.0118, "step": 13198 }, { "epoch": 24.352723915050785, "grad_norm": 0.28621774911880493, "learning_rate": 2.8934666666666666e-05, "loss": 0.0078, "step": 13199 }, { "epoch": 24.354570637119114, "grad_norm": 0.5483892560005188, "learning_rate": 2.8934333333333335e-05, "loss": 0.005, "step": 13200 }, { "epoch": 24.356417359187443, "grad_norm": 0.11460002511739731, "learning_rate": 2.8934e-05, "loss": 0.0026, "step": 13201 }, { "epoch": 24.358264081255772, "grad_norm": 0.22816605865955353, "learning_rate": 2.8933666666666667e-05, "loss": 0.0056, "step": 13202 }, { "epoch": 24.3601108033241, "grad_norm": 0.06393951922655106, "learning_rate": 2.8933333333333333e-05, "loss": 0.0011, "step": 13203 }, { "epoch": 
24.361957525392427, "grad_norm": 0.26738423109054565, "learning_rate": 2.8933000000000002e-05, "loss": 0.0034, "step": 13204 }, { "epoch": 24.363804247460756, "grad_norm": 0.4886252284049988, "learning_rate": 2.8932666666666665e-05, "loss": 0.0043, "step": 13205 }, { "epoch": 24.365650969529085, "grad_norm": 0.3189277648925781, "learning_rate": 2.8932333333333334e-05, "loss": 0.0057, "step": 13206 }, { "epoch": 24.367497691597414, "grad_norm": 0.1422029286623001, "learning_rate": 2.8932e-05, "loss": 0.004, "step": 13207 }, { "epoch": 24.369344413665743, "grad_norm": 0.07584530115127563, "learning_rate": 2.8931666666666666e-05, "loss": 0.001, "step": 13208 }, { "epoch": 24.371191135734072, "grad_norm": 0.9531792998313904, "learning_rate": 2.8931333333333335e-05, "loss": 0.1437, "step": 13209 }, { "epoch": 24.3730378578024, "grad_norm": 0.6607851982116699, "learning_rate": 2.8931e-05, "loss": 0.124, "step": 13210 }, { "epoch": 24.37488457987073, "grad_norm": 0.43645626306533813, "learning_rate": 2.8930666666666667e-05, "loss": 0.0584, "step": 13211 }, { "epoch": 24.37673130193906, "grad_norm": 0.7066429853439331, "learning_rate": 2.8930333333333333e-05, "loss": 0.1331, "step": 13212 }, { "epoch": 24.378578024007385, "grad_norm": 0.6537086963653564, "learning_rate": 2.8930000000000003e-05, "loss": 0.0721, "step": 13213 }, { "epoch": 24.380424746075715, "grad_norm": 0.481343150138855, "learning_rate": 2.8929666666666665e-05, "loss": 0.0539, "step": 13214 }, { "epoch": 24.382271468144044, "grad_norm": 1.752601146697998, "learning_rate": 2.8929333333333334e-05, "loss": 0.0644, "step": 13215 }, { "epoch": 24.384118190212373, "grad_norm": 0.49992284178733826, "learning_rate": 2.8929000000000004e-05, "loss": 0.0564, "step": 13216 }, { "epoch": 24.385964912280702, "grad_norm": 0.4543759822845459, "learning_rate": 2.8928666666666666e-05, "loss": 0.0531, "step": 13217 }, { "epoch": 24.38781163434903, "grad_norm": 0.4484714865684509, "learning_rate": 2.8928333333333336e-05, 
"loss": 0.0298, "step": 13218 }, { "epoch": 24.38965835641736, "grad_norm": 0.5918431878089905, "learning_rate": 2.8928e-05, "loss": 0.044, "step": 13219 }, { "epoch": 24.39150507848569, "grad_norm": 0.29719340801239014, "learning_rate": 2.8927666666666667e-05, "loss": 0.0324, "step": 13220 }, { "epoch": 24.393351800554015, "grad_norm": 0.20278289914131165, "learning_rate": 2.8927333333333333e-05, "loss": 0.0549, "step": 13221 }, { "epoch": 24.395198522622344, "grad_norm": 0.2530817985534668, "learning_rate": 2.8927000000000003e-05, "loss": 0.0388, "step": 13222 }, { "epoch": 24.397045244690673, "grad_norm": 0.5364762544631958, "learning_rate": 2.8926666666666665e-05, "loss": 0.0477, "step": 13223 }, { "epoch": 24.398891966759003, "grad_norm": 0.7806441187858582, "learning_rate": 2.8926333333333335e-05, "loss": 0.0251, "step": 13224 }, { "epoch": 24.40073868882733, "grad_norm": 0.29668524861335754, "learning_rate": 2.8926e-05, "loss": 0.0114, "step": 13225 }, { "epoch": 24.40258541089566, "grad_norm": 0.282160222530365, "learning_rate": 2.8925666666666666e-05, "loss": 0.0141, "step": 13226 }, { "epoch": 24.40443213296399, "grad_norm": 0.24868442118167877, "learning_rate": 2.8925333333333336e-05, "loss": 0.0102, "step": 13227 }, { "epoch": 24.40627885503232, "grad_norm": 0.30800092220306396, "learning_rate": 2.8925e-05, "loss": 0.0074, "step": 13228 }, { "epoch": 24.408125577100645, "grad_norm": 0.23089873790740967, "learning_rate": 2.8924666666666668e-05, "loss": 0.021, "step": 13229 }, { "epoch": 24.409972299168974, "grad_norm": 0.33230042457580566, "learning_rate": 2.8924333333333334e-05, "loss": 0.0067, "step": 13230 }, { "epoch": 24.411819021237303, "grad_norm": 0.40151605010032654, "learning_rate": 2.8924e-05, "loss": 0.0063, "step": 13231 }, { "epoch": 24.413665743305632, "grad_norm": 0.29878801107406616, "learning_rate": 2.8923666666666665e-05, "loss": 0.0052, "step": 13232 }, { "epoch": 24.41551246537396, "grad_norm": 0.1386539191007614, "learning_rate": 
2.8923333333333335e-05, "loss": 0.0035, "step": 13233 }, { "epoch": 24.41735918744229, "grad_norm": 0.20161989331245422, "learning_rate": 2.8923e-05, "loss": 0.0081, "step": 13234 }, { "epoch": 24.41920590951062, "grad_norm": 0.23625294864177704, "learning_rate": 2.8922666666666667e-05, "loss": 0.0042, "step": 13235 }, { "epoch": 24.42105263157895, "grad_norm": 0.45596843957901, "learning_rate": 2.8922333333333336e-05, "loss": 0.0075, "step": 13236 }, { "epoch": 24.422899353647278, "grad_norm": 0.2962850034236908, "learning_rate": 2.8922e-05, "loss": 0.0054, "step": 13237 }, { "epoch": 24.424746075715603, "grad_norm": 0.7074769139289856, "learning_rate": 2.8921666666666668e-05, "loss": 0.0161, "step": 13238 }, { "epoch": 24.426592797783933, "grad_norm": 0.22207657992839813, "learning_rate": 2.8921333333333334e-05, "loss": 0.0047, "step": 13239 }, { "epoch": 24.42843951985226, "grad_norm": 0.26032233238220215, "learning_rate": 2.8921e-05, "loss": 0.0067, "step": 13240 }, { "epoch": 24.43028624192059, "grad_norm": 0.15774919092655182, "learning_rate": 2.892066666666667e-05, "loss": 0.005, "step": 13241 }, { "epoch": 24.43213296398892, "grad_norm": 0.20989170670509338, "learning_rate": 2.8920333333333335e-05, "loss": 0.0039, "step": 13242 }, { "epoch": 24.43397968605725, "grad_norm": 0.25299087166786194, "learning_rate": 2.892e-05, "loss": 0.0044, "step": 13243 }, { "epoch": 24.43582640812558, "grad_norm": 0.31932535767555237, "learning_rate": 2.8919666666666667e-05, "loss": 0.0128, "step": 13244 }, { "epoch": 24.437673130193907, "grad_norm": 0.14626465737819672, "learning_rate": 2.8919333333333336e-05, "loss": 0.0029, "step": 13245 }, { "epoch": 24.439519852262233, "grad_norm": 0.2414906620979309, "learning_rate": 2.8919e-05, "loss": 0.0026, "step": 13246 }, { "epoch": 24.441366574330562, "grad_norm": 0.20157982409000397, "learning_rate": 2.8918666666666668e-05, "loss": 0.0074, "step": 13247 }, { "epoch": 24.44321329639889, "grad_norm": 0.14444953203201294, 
"learning_rate": 2.8918333333333334e-05, "loss": 0.0018, "step": 13248 }, { "epoch": 24.44506001846722, "grad_norm": 0.26020002365112305, "learning_rate": 2.8918e-05, "loss": 0.0115, "step": 13249 }, { "epoch": 24.44690674053555, "grad_norm": 0.18706028163433075, "learning_rate": 2.891766666666667e-05, "loss": 0.0071, "step": 13250 }, { "epoch": 24.44875346260388, "grad_norm": 0.7018864154815674, "learning_rate": 2.8917333333333335e-05, "loss": 0.0053, "step": 13251 }, { "epoch": 24.450600184672208, "grad_norm": 0.20267437398433685, "learning_rate": 2.8917e-05, "loss": 0.007, "step": 13252 }, { "epoch": 24.452446906740537, "grad_norm": 0.22838689386844635, "learning_rate": 2.8916666666666667e-05, "loss": 0.0072, "step": 13253 }, { "epoch": 24.454293628808863, "grad_norm": 0.30894041061401367, "learning_rate": 2.8916333333333336e-05, "loss": 0.0039, "step": 13254 }, { "epoch": 24.45614035087719, "grad_norm": 0.33067917823791504, "learning_rate": 2.8916e-05, "loss": 0.0084, "step": 13255 }, { "epoch": 24.45798707294552, "grad_norm": 0.10167045146226883, "learning_rate": 2.8915666666666668e-05, "loss": 0.003, "step": 13256 }, { "epoch": 24.45983379501385, "grad_norm": 0.37686604261398315, "learning_rate": 2.8915333333333334e-05, "loss": 0.014, "step": 13257 }, { "epoch": 24.46168051708218, "grad_norm": 0.5401965379714966, "learning_rate": 2.8915e-05, "loss": 0.0052, "step": 13258 }, { "epoch": 24.46352723915051, "grad_norm": 0.7932718992233276, "learning_rate": 2.891466666666667e-05, "loss": 0.1161, "step": 13259 }, { "epoch": 24.465373961218837, "grad_norm": 0.6346531510353088, "learning_rate": 2.8914333333333332e-05, "loss": 0.1108, "step": 13260 }, { "epoch": 24.467220683287167, "grad_norm": 0.6250077486038208, "learning_rate": 2.8914e-05, "loss": 0.1033, "step": 13261 }, { "epoch": 24.469067405355496, "grad_norm": 0.5043835639953613, "learning_rate": 2.8913666666666667e-05, "loss": 0.0862, "step": 13262 }, { "epoch": 24.47091412742382, "grad_norm": 
0.3136883080005646, "learning_rate": 2.8913333333333333e-05, "loss": 0.0542, "step": 13263 }, { "epoch": 24.47276084949215, "grad_norm": 0.4678816795349121, "learning_rate": 2.8913e-05, "loss": 0.0719, "step": 13264 }, { "epoch": 24.47460757156048, "grad_norm": 0.30108150839805603, "learning_rate": 2.891266666666667e-05, "loss": 0.0384, "step": 13265 }, { "epoch": 24.47645429362881, "grad_norm": 0.33100152015686035, "learning_rate": 2.8912333333333334e-05, "loss": 0.042, "step": 13266 }, { "epoch": 24.478301015697138, "grad_norm": 0.41817301511764526, "learning_rate": 2.8912e-05, "loss": 0.0338, "step": 13267 }, { "epoch": 24.480147737765467, "grad_norm": 0.5045404434204102, "learning_rate": 2.891166666666667e-05, "loss": 0.0461, "step": 13268 }, { "epoch": 24.481994459833796, "grad_norm": 0.3951895236968994, "learning_rate": 2.8911333333333332e-05, "loss": 0.0282, "step": 13269 }, { "epoch": 24.483841181902125, "grad_norm": 0.5995557904243469, "learning_rate": 2.8911e-05, "loss": 0.0291, "step": 13270 }, { "epoch": 24.48568790397045, "grad_norm": 0.39431875944137573, "learning_rate": 2.8910666666666667e-05, "loss": 0.0178, "step": 13271 }, { "epoch": 24.48753462603878, "grad_norm": 0.19993415474891663, "learning_rate": 2.8910333333333333e-05, "loss": 0.0146, "step": 13272 }, { "epoch": 24.48938134810711, "grad_norm": 0.49636518955230713, "learning_rate": 2.891e-05, "loss": 0.022, "step": 13273 }, { "epoch": 24.49122807017544, "grad_norm": 0.16462446749210358, "learning_rate": 2.890966666666667e-05, "loss": 0.0063, "step": 13274 }, { "epoch": 24.493074792243767, "grad_norm": 0.2293635904788971, "learning_rate": 2.8909333333333335e-05, "loss": 0.0052, "step": 13275 }, { "epoch": 24.494921514312097, "grad_norm": 0.2452090084552765, "learning_rate": 2.8909e-05, "loss": 0.0078, "step": 13276 }, { "epoch": 24.496768236380426, "grad_norm": 0.5290648341178894, "learning_rate": 2.890866666666667e-05, "loss": 0.0172, "step": 13277 }, { "epoch": 24.498614958448755, 
"grad_norm": 0.28430402278900146, "learning_rate": 2.8908333333333332e-05, "loss": 0.0126, "step": 13278 }, { "epoch": 24.50046168051708, "grad_norm": 0.22045278549194336, "learning_rate": 2.8908000000000002e-05, "loss": 0.0115, "step": 13279 }, { "epoch": 24.50230840258541, "grad_norm": 0.28687742352485657, "learning_rate": 2.8907666666666668e-05, "loss": 0.006, "step": 13280 }, { "epoch": 24.50415512465374, "grad_norm": 0.2711004316806793, "learning_rate": 2.8907333333333334e-05, "loss": 0.0083, "step": 13281 }, { "epoch": 24.506001846722068, "grad_norm": 0.1592472940683365, "learning_rate": 2.8907e-05, "loss": 0.0054, "step": 13282 }, { "epoch": 24.507848568790397, "grad_norm": 0.20724567770957947, "learning_rate": 2.890666666666667e-05, "loss": 0.0056, "step": 13283 }, { "epoch": 24.509695290858726, "grad_norm": 0.19441282749176025, "learning_rate": 2.8906333333333335e-05, "loss": 0.0047, "step": 13284 }, { "epoch": 24.511542012927055, "grad_norm": 0.2539184093475342, "learning_rate": 2.8906e-05, "loss": 0.008, "step": 13285 }, { "epoch": 24.513388734995385, "grad_norm": 0.36838990449905396, "learning_rate": 2.8905666666666667e-05, "loss": 0.0067, "step": 13286 }, { "epoch": 24.51523545706371, "grad_norm": 0.23784856498241425, "learning_rate": 2.8905333333333333e-05, "loss": 0.0058, "step": 13287 }, { "epoch": 24.51708217913204, "grad_norm": 0.1111980453133583, "learning_rate": 2.8905000000000002e-05, "loss": 0.0024, "step": 13288 }, { "epoch": 24.51892890120037, "grad_norm": 0.24265041947364807, "learning_rate": 2.8904666666666664e-05, "loss": 0.0087, "step": 13289 }, { "epoch": 24.520775623268698, "grad_norm": 0.21897314488887787, "learning_rate": 2.8904333333333334e-05, "loss": 0.0052, "step": 13290 }, { "epoch": 24.522622345337027, "grad_norm": 0.29665127396583557, "learning_rate": 2.8904000000000003e-05, "loss": 0.0144, "step": 13291 }, { "epoch": 24.524469067405356, "grad_norm": 0.2656313180923462, "learning_rate": 2.8903666666666666e-05, "loss": 0.008, 
"step": 13292 }, { "epoch": 24.526315789473685, "grad_norm": 0.1815796196460724, "learning_rate": 2.8903333333333335e-05, "loss": 0.0042, "step": 13293 }, { "epoch": 24.528162511542014, "grad_norm": 0.2628864049911499, "learning_rate": 2.8903e-05, "loss": 0.0061, "step": 13294 }, { "epoch": 24.530009233610343, "grad_norm": 0.09849076718091965, "learning_rate": 2.8902666666666667e-05, "loss": 0.0026, "step": 13295 }, { "epoch": 24.53185595567867, "grad_norm": 0.25689443945884705, "learning_rate": 2.8902333333333333e-05, "loss": 0.0053, "step": 13296 }, { "epoch": 24.533702677746998, "grad_norm": 0.28762394189834595, "learning_rate": 2.8902000000000002e-05, "loss": 0.0081, "step": 13297 }, { "epoch": 24.535549399815327, "grad_norm": 0.14530755579471588, "learning_rate": 2.8901666666666665e-05, "loss": 0.0043, "step": 13298 }, { "epoch": 24.537396121883656, "grad_norm": 0.10289536416530609, "learning_rate": 2.8901333333333334e-05, "loss": 0.0027, "step": 13299 }, { "epoch": 24.539242843951985, "grad_norm": 0.33966517448425293, "learning_rate": 2.8901000000000003e-05, "loss": 0.007, "step": 13300 }, { "epoch": 24.541089566020315, "grad_norm": 0.21148870885372162, "learning_rate": 2.8900666666666666e-05, "loss": 0.0036, "step": 13301 }, { "epoch": 24.542936288088644, "grad_norm": 0.24400590360164642, "learning_rate": 2.8900333333333335e-05, "loss": 0.0048, "step": 13302 }, { "epoch": 24.544783010156973, "grad_norm": 0.2181837111711502, "learning_rate": 2.89e-05, "loss": 0.0045, "step": 13303 }, { "epoch": 24.5466297322253, "grad_norm": 0.16021010279655457, "learning_rate": 2.8899666666666667e-05, "loss": 0.004, "step": 13304 }, { "epoch": 24.548476454293628, "grad_norm": 0.39383822679519653, "learning_rate": 2.8899333333333333e-05, "loss": 0.0203, "step": 13305 }, { "epoch": 24.550323176361957, "grad_norm": 0.6963154077529907, "learning_rate": 2.8899000000000002e-05, "loss": 0.0139, "step": 13306 }, { "epoch": 24.552169898430286, "grad_norm": 0.4663408100605011, 
"learning_rate": 2.8898666666666668e-05, "loss": 0.0112, "step": 13307 }, { "epoch": 24.554016620498615, "grad_norm": 0.6084351539611816, "learning_rate": 2.8898333333333334e-05, "loss": 0.0091, "step": 13308 }, { "epoch": 24.555863342566944, "grad_norm": 0.4496295154094696, "learning_rate": 2.8898000000000004e-05, "loss": 0.0855, "step": 13309 }, { "epoch": 24.557710064635273, "grad_norm": 0.5155513882637024, "learning_rate": 2.8897666666666666e-05, "loss": 0.0998, "step": 13310 }, { "epoch": 24.559556786703602, "grad_norm": 0.6382702589035034, "learning_rate": 2.8897333333333335e-05, "loss": 0.1049, "step": 13311 }, { "epoch": 24.56140350877193, "grad_norm": 0.8368784785270691, "learning_rate": 2.8897e-05, "loss": 0.0648, "step": 13312 }, { "epoch": 24.563250230840257, "grad_norm": 0.5362563729286194, "learning_rate": 2.8896666666666667e-05, "loss": 0.0595, "step": 13313 }, { "epoch": 24.565096952908586, "grad_norm": 0.42777219414711, "learning_rate": 2.8896333333333333e-05, "loss": 0.0354, "step": 13314 }, { "epoch": 24.566943674976915, "grad_norm": 0.5480246543884277, "learning_rate": 2.8896e-05, "loss": 0.0547, "step": 13315 }, { "epoch": 24.568790397045245, "grad_norm": 0.5275306105613708, "learning_rate": 2.889566666666667e-05, "loss": 0.0548, "step": 13316 }, { "epoch": 24.570637119113574, "grad_norm": 0.4799412786960602, "learning_rate": 2.8895333333333334e-05, "loss": 0.0294, "step": 13317 }, { "epoch": 24.572483841181903, "grad_norm": 0.3169337213039398, "learning_rate": 2.8895e-05, "loss": 0.0237, "step": 13318 }, { "epoch": 24.574330563250232, "grad_norm": 0.3254132866859436, "learning_rate": 2.8894666666666666e-05, "loss": 0.018, "step": 13319 }, { "epoch": 24.57617728531856, "grad_norm": 0.1958315074443817, "learning_rate": 2.8894333333333336e-05, "loss": 0.0166, "step": 13320 }, { "epoch": 24.578024007386887, "grad_norm": 0.3534824848175049, "learning_rate": 2.8893999999999998e-05, "loss": 0.0379, "step": 13321 }, { "epoch": 24.579870729455216, 
"grad_norm": 0.2735038101673126, "learning_rate": 2.8893666666666667e-05, "loss": 0.0295, "step": 13322 }, { "epoch": 24.581717451523545, "grad_norm": 0.2168884128332138, "learning_rate": 2.8893333333333333e-05, "loss": 0.0195, "step": 13323 }, { "epoch": 24.583564173591874, "grad_norm": 0.3940054774284363, "learning_rate": 2.8893e-05, "loss": 0.0138, "step": 13324 }, { "epoch": 24.585410895660203, "grad_norm": 0.10811630636453629, "learning_rate": 2.889266666666667e-05, "loss": 0.0043, "step": 13325 }, { "epoch": 24.587257617728532, "grad_norm": 0.27094316482543945, "learning_rate": 2.8892333333333335e-05, "loss": 0.0207, "step": 13326 }, { "epoch": 24.58910433979686, "grad_norm": 0.2474210113286972, "learning_rate": 2.8892e-05, "loss": 0.0125, "step": 13327 }, { "epoch": 24.59095106186519, "grad_norm": 0.24952836334705353, "learning_rate": 2.8891666666666666e-05, "loss": 0.038, "step": 13328 }, { "epoch": 24.592797783933516, "grad_norm": 0.19649440050125122, "learning_rate": 2.8891333333333336e-05, "loss": 0.0074, "step": 13329 }, { "epoch": 24.594644506001845, "grad_norm": 0.290304571390152, "learning_rate": 2.8891e-05, "loss": 0.009, "step": 13330 }, { "epoch": 24.596491228070175, "grad_norm": 0.28430837392807007, "learning_rate": 2.8890666666666668e-05, "loss": 0.0062, "step": 13331 }, { "epoch": 24.598337950138504, "grad_norm": 0.14937494695186615, "learning_rate": 2.8890333333333334e-05, "loss": 0.0045, "step": 13332 }, { "epoch": 24.600184672206833, "grad_norm": 0.17749227583408356, "learning_rate": 2.889e-05, "loss": 0.0039, "step": 13333 }, { "epoch": 24.602031394275162, "grad_norm": 0.16499114036560059, "learning_rate": 2.888966666666667e-05, "loss": 0.0054, "step": 13334 }, { "epoch": 24.60387811634349, "grad_norm": 0.38722649216651917, "learning_rate": 2.8889333333333335e-05, "loss": 0.0109, "step": 13335 }, { "epoch": 24.60572483841182, "grad_norm": 0.5073453187942505, "learning_rate": 2.8889e-05, "loss": 0.0095, "step": 13336 }, { "epoch": 
24.607571560480146, "grad_norm": 0.1586906909942627, "learning_rate": 2.8888666666666667e-05, "loss": 0.0034, "step": 13337 }, { "epoch": 24.609418282548475, "grad_norm": 0.15562497079372406, "learning_rate": 2.8888333333333336e-05, "loss": 0.0036, "step": 13338 }, { "epoch": 24.611265004616804, "grad_norm": 0.23065587878227234, "learning_rate": 2.8888e-05, "loss": 0.0048, "step": 13339 }, { "epoch": 24.613111726685133, "grad_norm": 0.5878494381904602, "learning_rate": 2.8887666666666668e-05, "loss": 0.0104, "step": 13340 }, { "epoch": 24.614958448753463, "grad_norm": 0.2063795030117035, "learning_rate": 2.8887333333333337e-05, "loss": 0.0055, "step": 13341 }, { "epoch": 24.61680517082179, "grad_norm": 0.1259927600622177, "learning_rate": 2.8887e-05, "loss": 0.003, "step": 13342 }, { "epoch": 24.61865189289012, "grad_norm": 0.15318113565444946, "learning_rate": 2.888666666666667e-05, "loss": 0.0035, "step": 13343 }, { "epoch": 24.62049861495845, "grad_norm": 0.2823616862297058, "learning_rate": 2.8886333333333335e-05, "loss": 0.0066, "step": 13344 }, { "epoch": 24.62234533702678, "grad_norm": 0.23812097311019897, "learning_rate": 2.8886e-05, "loss": 0.0325, "step": 13345 }, { "epoch": 24.624192059095105, "grad_norm": 0.14409127831459045, "learning_rate": 2.8885666666666667e-05, "loss": 0.0033, "step": 13346 }, { "epoch": 24.626038781163434, "grad_norm": 0.0659145936369896, "learning_rate": 2.8885333333333333e-05, "loss": 0.0012, "step": 13347 }, { "epoch": 24.627885503231763, "grad_norm": 0.3200194239616394, "learning_rate": 2.8885e-05, "loss": 0.0105, "step": 13348 }, { "epoch": 24.629732225300092, "grad_norm": 0.26891952753067017, "learning_rate": 2.8884666666666668e-05, "loss": 0.0052, "step": 13349 }, { "epoch": 24.63157894736842, "grad_norm": 0.328961580991745, "learning_rate": 2.8884333333333334e-05, "loss": 0.0074, "step": 13350 }, { "epoch": 24.63342566943675, "grad_norm": 0.1385984867811203, "learning_rate": 2.8884e-05, "loss": 0.0094, "step": 13351 }, { 
"epoch": 24.63527239150508, "grad_norm": 0.15883958339691162, "learning_rate": 2.888366666666667e-05, "loss": 0.0046, "step": 13352 }, { "epoch": 24.63711911357341, "grad_norm": 0.21640440821647644, "learning_rate": 2.8883333333333332e-05, "loss": 0.003, "step": 13353 }, { "epoch": 24.638965835641734, "grad_norm": 0.18219715356826782, "learning_rate": 2.8883e-05, "loss": 0.0043, "step": 13354 }, { "epoch": 24.640812557710063, "grad_norm": 0.23579628765583038, "learning_rate": 2.8882666666666667e-05, "loss": 0.0042, "step": 13355 }, { "epoch": 24.642659279778393, "grad_norm": 0.3087804913520813, "learning_rate": 2.8882333333333333e-05, "loss": 0.0094, "step": 13356 }, { "epoch": 24.64450600184672, "grad_norm": 0.3579120635986328, "learning_rate": 2.8882000000000002e-05, "loss": 0.0137, "step": 13357 }, { "epoch": 24.64635272391505, "grad_norm": 0.3803880512714386, "learning_rate": 2.8881666666666668e-05, "loss": 0.0114, "step": 13358 }, { "epoch": 24.64819944598338, "grad_norm": 0.5040902495384216, "learning_rate": 2.8881333333333334e-05, "loss": 0.1614, "step": 13359 }, { "epoch": 24.65004616805171, "grad_norm": 0.5189815163612366, "learning_rate": 2.8881e-05, "loss": 0.0932, "step": 13360 }, { "epoch": 24.65189289012004, "grad_norm": 0.3309284448623657, "learning_rate": 2.888066666666667e-05, "loss": 0.1499, "step": 13361 }, { "epoch": 24.653739612188367, "grad_norm": 0.4587146043777466, "learning_rate": 2.8880333333333332e-05, "loss": 0.0822, "step": 13362 }, { "epoch": 24.655586334256693, "grad_norm": 0.34885290265083313, "learning_rate": 2.888e-05, "loss": 0.0672, "step": 13363 }, { "epoch": 24.657433056325022, "grad_norm": 0.38283026218414307, "learning_rate": 2.8879666666666667e-05, "loss": 0.0574, "step": 13364 }, { "epoch": 24.65927977839335, "grad_norm": 0.38761594891548157, "learning_rate": 2.8879333333333333e-05, "loss": 0.0586, "step": 13365 }, { "epoch": 24.66112650046168, "grad_norm": 0.3336412012577057, "learning_rate": 2.8879000000000003e-05, 
"loss": 0.0365, "step": 13366 }, { "epoch": 24.66297322253001, "grad_norm": 0.3232431411743164, "learning_rate": 2.887866666666667e-05, "loss": 0.03, "step": 13367 }, { "epoch": 24.66481994459834, "grad_norm": 0.5756363868713379, "learning_rate": 2.8878333333333334e-05, "loss": 0.0777, "step": 13368 }, { "epoch": 24.666666666666668, "grad_norm": 0.5154371857643127, "learning_rate": 2.8878e-05, "loss": 0.0313, "step": 13369 }, { "epoch": 24.668513388734997, "grad_norm": 0.35584941506385803, "learning_rate": 2.887766666666667e-05, "loss": 0.02, "step": 13370 }, { "epoch": 24.670360110803323, "grad_norm": 0.5981518030166626, "learning_rate": 2.8877333333333332e-05, "loss": 0.0728, "step": 13371 }, { "epoch": 24.67220683287165, "grad_norm": 0.4004424810409546, "learning_rate": 2.8877e-05, "loss": 0.066, "step": 13372 }, { "epoch": 24.67405355493998, "grad_norm": 0.27876365184783936, "learning_rate": 2.8876666666666667e-05, "loss": 0.0089, "step": 13373 }, { "epoch": 24.67590027700831, "grad_norm": 0.22904369235038757, "learning_rate": 2.8876333333333333e-05, "loss": 0.0088, "step": 13374 }, { "epoch": 24.67774699907664, "grad_norm": 0.2941538691520691, "learning_rate": 2.8876000000000003e-05, "loss": 0.0203, "step": 13375 }, { "epoch": 24.67959372114497, "grad_norm": 0.30584147572517395, "learning_rate": 2.8875666666666665e-05, "loss": 0.0091, "step": 13376 }, { "epoch": 24.681440443213297, "grad_norm": 0.31425368785858154, "learning_rate": 2.8875333333333335e-05, "loss": 0.0103, "step": 13377 }, { "epoch": 24.683287165281627, "grad_norm": 0.07397730648517609, "learning_rate": 2.8875e-05, "loss": 0.0021, "step": 13378 }, { "epoch": 24.685133887349952, "grad_norm": 0.19247840344905853, "learning_rate": 2.8874666666666666e-05, "loss": 0.005, "step": 13379 }, { "epoch": 24.68698060941828, "grad_norm": 0.30215951800346375, "learning_rate": 2.8874333333333332e-05, "loss": 0.0075, "step": 13380 }, { "epoch": 24.68882733148661, "grad_norm": 0.34927937388420105, 
"learning_rate": 2.8874000000000002e-05, "loss": 0.0176, "step": 13381 }, { "epoch": 24.69067405355494, "grad_norm": 0.17897507548332214, "learning_rate": 2.8873666666666668e-05, "loss": 0.0047, "step": 13382 }, { "epoch": 24.69252077562327, "grad_norm": 0.7408897280693054, "learning_rate": 2.8873333333333334e-05, "loss": 0.0087, "step": 13383 }, { "epoch": 24.694367497691598, "grad_norm": 0.16339300572872162, "learning_rate": 2.8873000000000003e-05, "loss": 0.0054, "step": 13384 }, { "epoch": 24.696214219759927, "grad_norm": 0.11997639387845993, "learning_rate": 2.8872666666666665e-05, "loss": 0.0025, "step": 13385 }, { "epoch": 24.698060941828256, "grad_norm": 0.618619978427887, "learning_rate": 2.8872333333333335e-05, "loss": 0.0377, "step": 13386 }, { "epoch": 24.69990766389658, "grad_norm": 0.08947763592004776, "learning_rate": 2.8872e-05, "loss": 0.0018, "step": 13387 }, { "epoch": 24.70175438596491, "grad_norm": 0.16694006323814392, "learning_rate": 2.8871666666666667e-05, "loss": 0.0054, "step": 13388 }, { "epoch": 24.70360110803324, "grad_norm": 0.44772008061408997, "learning_rate": 2.8871333333333333e-05, "loss": 0.0081, "step": 13389 }, { "epoch": 24.70544783010157, "grad_norm": 0.3581324815750122, "learning_rate": 2.8871000000000002e-05, "loss": 0.0362, "step": 13390 }, { "epoch": 24.7072945521699, "grad_norm": 0.17675131559371948, "learning_rate": 2.8870666666666668e-05, "loss": 0.0092, "step": 13391 }, { "epoch": 24.709141274238227, "grad_norm": 0.2986243665218353, "learning_rate": 2.8870333333333334e-05, "loss": 0.006, "step": 13392 }, { "epoch": 24.710987996306557, "grad_norm": 0.14071765542030334, "learning_rate": 2.8870000000000003e-05, "loss": 0.0032, "step": 13393 }, { "epoch": 24.712834718374886, "grad_norm": 0.3196142911911011, "learning_rate": 2.8869666666666666e-05, "loss": 0.0084, "step": 13394 }, { "epoch": 24.714681440443215, "grad_norm": 0.5192102789878845, "learning_rate": 2.8869333333333335e-05, "loss": 0.0095, "step": 13395 }, { 
"epoch": 24.71652816251154, "grad_norm": 0.21429091691970825, "learning_rate": 2.8869e-05, "loss": 0.0118, "step": 13396 }, { "epoch": 24.71837488457987, "grad_norm": 0.309095561504364, "learning_rate": 2.8868666666666667e-05, "loss": 0.0055, "step": 13397 }, { "epoch": 24.7202216066482, "grad_norm": 0.1888907253742218, "learning_rate": 2.8868333333333333e-05, "loss": 0.0069, "step": 13398 }, { "epoch": 24.722068328716528, "grad_norm": 0.3549118638038635, "learning_rate": 2.8868000000000002e-05, "loss": 0.0056, "step": 13399 }, { "epoch": 24.723915050784857, "grad_norm": 0.15123812854290009, "learning_rate": 2.8867666666666668e-05, "loss": 0.0048, "step": 13400 }, { "epoch": 24.725761772853186, "grad_norm": 0.18749882280826569, "learning_rate": 2.8867333333333334e-05, "loss": 0.0049, "step": 13401 }, { "epoch": 24.727608494921515, "grad_norm": 0.14293302595615387, "learning_rate": 2.8867000000000003e-05, "loss": 0.0025, "step": 13402 }, { "epoch": 24.729455216989845, "grad_norm": 0.14083927869796753, "learning_rate": 2.8866666666666666e-05, "loss": 0.0048, "step": 13403 }, { "epoch": 24.73130193905817, "grad_norm": 0.3582073450088501, "learning_rate": 2.8866333333333335e-05, "loss": 0.0081, "step": 13404 }, { "epoch": 24.7331486611265, "grad_norm": 0.32324060797691345, "learning_rate": 2.8866e-05, "loss": 0.0052, "step": 13405 }, { "epoch": 24.73499538319483, "grad_norm": 0.42408451437950134, "learning_rate": 2.8865666666666667e-05, "loss": 0.0073, "step": 13406 }, { "epoch": 24.736842105263158, "grad_norm": 0.7171687483787537, "learning_rate": 2.8865333333333336e-05, "loss": 0.0139, "step": 13407 }, { "epoch": 24.738688827331487, "grad_norm": 0.43229997158050537, "learning_rate": 2.8865e-05, "loss": 0.0085, "step": 13408 }, { "epoch": 24.740535549399816, "grad_norm": 0.7140260338783264, "learning_rate": 2.8864666666666668e-05, "loss": 0.2258, "step": 13409 }, { "epoch": 24.742382271468145, "grad_norm": 0.34567001461982727, "learning_rate": 2.8864333333333334e-05, 
"loss": 0.0694, "step": 13410 }, { "epoch": 24.744228993536474, "grad_norm": 0.6018345355987549, "learning_rate": 2.8864e-05, "loss": 0.1287, "step": 13411 }, { "epoch": 24.746075715604803, "grad_norm": 0.5588078498840332, "learning_rate": 2.8863666666666666e-05, "loss": 0.0671, "step": 13412 }, { "epoch": 24.74792243767313, "grad_norm": 0.4431043863296509, "learning_rate": 2.8863333333333335e-05, "loss": 0.0574, "step": 13413 }, { "epoch": 24.749769159741458, "grad_norm": 0.3815719187259674, "learning_rate": 2.8862999999999998e-05, "loss": 0.0652, "step": 13414 }, { "epoch": 24.751615881809787, "grad_norm": 0.4633784592151642, "learning_rate": 2.8862666666666667e-05, "loss": 0.0853, "step": 13415 }, { "epoch": 24.753462603878116, "grad_norm": 0.30097484588623047, "learning_rate": 2.8862333333333337e-05, "loss": 0.0583, "step": 13416 }, { "epoch": 24.755309325946445, "grad_norm": 0.3656047284603119, "learning_rate": 2.8862e-05, "loss": 0.0418, "step": 13417 }, { "epoch": 24.757156048014775, "grad_norm": 0.32067063450813293, "learning_rate": 2.886166666666667e-05, "loss": 0.0238, "step": 13418 }, { "epoch": 24.759002770083104, "grad_norm": 0.2664825916290283, "learning_rate": 2.8861333333333334e-05, "loss": 0.0244, "step": 13419 }, { "epoch": 24.760849492151433, "grad_norm": 0.261997789144516, "learning_rate": 2.8861e-05, "loss": 0.0197, "step": 13420 }, { "epoch": 24.76269621421976, "grad_norm": 0.3810383677482605, "learning_rate": 2.8860666666666666e-05, "loss": 0.035, "step": 13421 }, { "epoch": 24.764542936288088, "grad_norm": 0.26574939489364624, "learning_rate": 2.8860333333333336e-05, "loss": 0.0398, "step": 13422 }, { "epoch": 24.766389658356417, "grad_norm": 0.33361557126045227, "learning_rate": 2.8859999999999998e-05, "loss": 0.0297, "step": 13423 }, { "epoch": 24.768236380424746, "grad_norm": 0.19486159086227417, "learning_rate": 2.8859666666666667e-05, "loss": 0.0255, "step": 13424 }, { "epoch": 24.770083102493075, "grad_norm": 0.23205748200416565, 
"learning_rate": 2.8859333333333337e-05, "loss": 0.0089, "step": 13425 }, { "epoch": 24.771929824561404, "grad_norm": 0.28048232197761536, "learning_rate": 2.8859e-05, "loss": 0.0266, "step": 13426 }, { "epoch": 24.773776546629733, "grad_norm": 0.2901442348957062, "learning_rate": 2.885866666666667e-05, "loss": 0.0424, "step": 13427 }, { "epoch": 24.775623268698062, "grad_norm": 0.19676855206489563, "learning_rate": 2.8858333333333335e-05, "loss": 0.0249, "step": 13428 }, { "epoch": 24.777469990766388, "grad_norm": 0.1431822031736374, "learning_rate": 2.8858e-05, "loss": 0.0037, "step": 13429 }, { "epoch": 24.779316712834717, "grad_norm": 0.17462551593780518, "learning_rate": 2.8857666666666666e-05, "loss": 0.0206, "step": 13430 }, { "epoch": 24.781163434903046, "grad_norm": 0.130209818482399, "learning_rate": 2.8857333333333336e-05, "loss": 0.0037, "step": 13431 }, { "epoch": 24.783010156971375, "grad_norm": 0.22776542603969574, "learning_rate": 2.8857000000000002e-05, "loss": 0.0085, "step": 13432 }, { "epoch": 24.784856879039705, "grad_norm": 0.22143611311912537, "learning_rate": 2.8856666666666668e-05, "loss": 0.0063, "step": 13433 }, { "epoch": 24.786703601108034, "grad_norm": 0.15124769508838654, "learning_rate": 2.8856333333333337e-05, "loss": 0.0047, "step": 13434 }, { "epoch": 24.788550323176363, "grad_norm": 0.2723243832588196, "learning_rate": 2.8856e-05, "loss": 0.004, "step": 13435 }, { "epoch": 24.790397045244692, "grad_norm": 0.41200390458106995, "learning_rate": 2.885566666666667e-05, "loss": 0.0171, "step": 13436 }, { "epoch": 24.792243767313018, "grad_norm": 0.17938579618930817, "learning_rate": 2.885533333333333e-05, "loss": 0.0081, "step": 13437 }, { "epoch": 24.794090489381347, "grad_norm": 0.12062520533800125, "learning_rate": 2.8855e-05, "loss": 0.0021, "step": 13438 }, { "epoch": 24.795937211449676, "grad_norm": 0.5112106800079346, "learning_rate": 2.8854666666666667e-05, "loss": 0.014, "step": 13439 }, { "epoch": 24.797783933518005, 
"grad_norm": 0.21921005845069885, "learning_rate": 2.8854333333333333e-05, "loss": 0.0039, "step": 13440 }, { "epoch": 24.799630655586334, "grad_norm": 0.4647342562675476, "learning_rate": 2.8854000000000002e-05, "loss": 0.0102, "step": 13441 }, { "epoch": 24.801477377654663, "grad_norm": 0.17171858251094818, "learning_rate": 2.8853666666666668e-05, "loss": 0.0029, "step": 13442 }, { "epoch": 24.803324099722992, "grad_norm": 0.39785921573638916, "learning_rate": 2.8853333333333334e-05, "loss": 0.0057, "step": 13443 }, { "epoch": 24.80517082179132, "grad_norm": 0.1778843104839325, "learning_rate": 2.8853e-05, "loss": 0.0053, "step": 13444 }, { "epoch": 24.80701754385965, "grad_norm": 0.26482951641082764, "learning_rate": 2.885266666666667e-05, "loss": 0.0084, "step": 13445 }, { "epoch": 24.808864265927976, "grad_norm": 0.5930157899856567, "learning_rate": 2.885233333333333e-05, "loss": 0.0062, "step": 13446 }, { "epoch": 24.810710987996305, "grad_norm": 0.7362170815467834, "learning_rate": 2.8852e-05, "loss": 0.008, "step": 13447 }, { "epoch": 24.812557710064635, "grad_norm": 0.2874982953071594, "learning_rate": 2.8851666666666667e-05, "loss": 0.01, "step": 13448 }, { "epoch": 24.814404432132964, "grad_norm": 0.2321842610836029, "learning_rate": 2.8851333333333333e-05, "loss": 0.0048, "step": 13449 }, { "epoch": 24.816251154201293, "grad_norm": 0.550155520439148, "learning_rate": 2.8851000000000002e-05, "loss": 0.0074, "step": 13450 }, { "epoch": 24.818097876269622, "grad_norm": 0.4127471148967743, "learning_rate": 2.8850666666666668e-05, "loss": 0.007, "step": 13451 }, { "epoch": 24.81994459833795, "grad_norm": 0.33397412300109863, "learning_rate": 2.8850333333333334e-05, "loss": 0.0131, "step": 13452 }, { "epoch": 24.82179132040628, "grad_norm": 0.25564906001091003, "learning_rate": 2.885e-05, "loss": 0.0039, "step": 13453 }, { "epoch": 24.823638042474606, "grad_norm": 0.0578516349196434, "learning_rate": 2.884966666666667e-05, "loss": 0.0013, "step": 13454 }, { 
"epoch": 24.825484764542935, "grad_norm": 0.14229914546012878, "learning_rate": 2.8849333333333332e-05, "loss": 0.0038, "step": 13455 }, { "epoch": 24.827331486611264, "grad_norm": 0.5849806070327759, "learning_rate": 2.8849e-05, "loss": 0.0081, "step": 13456 }, { "epoch": 24.829178208679593, "grad_norm": 0.2880188524723053, "learning_rate": 2.884866666666667e-05, "loss": 0.0052, "step": 13457 }, { "epoch": 24.831024930747922, "grad_norm": 0.32646384835243225, "learning_rate": 2.8848333333333333e-05, "loss": 0.0126, "step": 13458 }, { "epoch": 24.83287165281625, "grad_norm": 0.651218593120575, "learning_rate": 2.8848000000000002e-05, "loss": 0.1134, "step": 13459 }, { "epoch": 24.83471837488458, "grad_norm": 0.3713066577911377, "learning_rate": 2.8847666666666668e-05, "loss": 0.0834, "step": 13460 }, { "epoch": 24.83656509695291, "grad_norm": 0.4958324730396271, "learning_rate": 2.8847333333333334e-05, "loss": 0.061, "step": 13461 }, { "epoch": 24.83841181902124, "grad_norm": 0.5096185803413391, "learning_rate": 2.8847e-05, "loss": 0.0809, "step": 13462 }, { "epoch": 24.840258541089565, "grad_norm": 0.2968968451023102, "learning_rate": 2.884666666666667e-05, "loss": 0.0972, "step": 13463 }, { "epoch": 24.842105263157894, "grad_norm": 0.3534603416919708, "learning_rate": 2.8846333333333332e-05, "loss": 0.0804, "step": 13464 }, { "epoch": 24.843951985226223, "grad_norm": 0.3805117607116699, "learning_rate": 2.8846e-05, "loss": 0.0425, "step": 13465 }, { "epoch": 24.845798707294552, "grad_norm": 0.2468799501657486, "learning_rate": 2.8845666666666667e-05, "loss": 0.0254, "step": 13466 }, { "epoch": 24.84764542936288, "grad_norm": 0.4495117962360382, "learning_rate": 2.8845333333333333e-05, "loss": 0.0336, "step": 13467 }, { "epoch": 24.84949215143121, "grad_norm": 0.38449251651763916, "learning_rate": 2.8845000000000003e-05, "loss": 0.0221, "step": 13468 }, { "epoch": 24.85133887349954, "grad_norm": 0.38495227694511414, "learning_rate": 2.8844666666666665e-05, "loss": 
0.0217, "step": 13469 }, { "epoch": 24.85318559556787, "grad_norm": 0.5466144680976868, "learning_rate": 2.8844333333333334e-05, "loss": 0.0275, "step": 13470 }, { "epoch": 24.855032317636194, "grad_norm": 0.4924391508102417, "learning_rate": 2.8844e-05, "loss": 0.0384, "step": 13471 }, { "epoch": 24.856879039704523, "grad_norm": 0.33810392022132874, "learning_rate": 2.8843666666666666e-05, "loss": 0.0243, "step": 13472 }, { "epoch": 24.858725761772853, "grad_norm": 0.17373476922512054, "learning_rate": 2.8843333333333332e-05, "loss": 0.0079, "step": 13473 }, { "epoch": 24.86057248384118, "grad_norm": 0.5740019083023071, "learning_rate": 2.8843e-05, "loss": 0.0169, "step": 13474 }, { "epoch": 24.86241920590951, "grad_norm": 0.1319696605205536, "learning_rate": 2.8842666666666667e-05, "loss": 0.0051, "step": 13475 }, { "epoch": 24.86426592797784, "grad_norm": 0.46193742752075195, "learning_rate": 2.8842333333333333e-05, "loss": 0.0219, "step": 13476 }, { "epoch": 24.86611265004617, "grad_norm": 0.13610051572322845, "learning_rate": 2.8842000000000003e-05, "loss": 0.0043, "step": 13477 }, { "epoch": 24.8679593721145, "grad_norm": 0.2926333248615265, "learning_rate": 2.8841666666666665e-05, "loss": 0.0078, "step": 13478 }, { "epoch": 24.869806094182824, "grad_norm": 0.35700613260269165, "learning_rate": 2.8841333333333335e-05, "loss": 0.0068, "step": 13479 }, { "epoch": 24.871652816251153, "grad_norm": 0.4237408936023712, "learning_rate": 2.8841e-05, "loss": 0.0059, "step": 13480 }, { "epoch": 24.873499538319482, "grad_norm": 0.26153576374053955, "learning_rate": 2.8840666666666666e-05, "loss": 0.0112, "step": 13481 }, { "epoch": 24.87534626038781, "grad_norm": 0.22728842496871948, "learning_rate": 2.8840333333333336e-05, "loss": 0.0069, "step": 13482 }, { "epoch": 24.87719298245614, "grad_norm": 0.22643160820007324, "learning_rate": 2.8840000000000002e-05, "loss": 0.0142, "step": 13483 }, { "epoch": 24.87903970452447, "grad_norm": 0.2977308928966522, "learning_rate": 
2.8839666666666668e-05, "loss": 0.0188, "step": 13484 }, { "epoch": 24.8808864265928, "grad_norm": 0.17739634215831757, "learning_rate": 2.8839333333333334e-05, "loss": 0.0048, "step": 13485 }, { "epoch": 24.882733148661128, "grad_norm": 0.1741904318332672, "learning_rate": 2.8839000000000003e-05, "loss": 0.0024, "step": 13486 }, { "epoch": 24.884579870729453, "grad_norm": 0.2034701704978943, "learning_rate": 2.8838666666666665e-05, "loss": 0.0073, "step": 13487 }, { "epoch": 24.886426592797783, "grad_norm": 0.24229569733142853, "learning_rate": 2.8838333333333335e-05, "loss": 0.0707, "step": 13488 }, { "epoch": 24.88827331486611, "grad_norm": 0.49798405170440674, "learning_rate": 2.8838e-05, "loss": 0.0124, "step": 13489 }, { "epoch": 24.89012003693444, "grad_norm": 0.29836997389793396, "learning_rate": 2.8837666666666667e-05, "loss": 0.0045, "step": 13490 }, { "epoch": 24.89196675900277, "grad_norm": 0.2838921546936035, "learning_rate": 2.8837333333333336e-05, "loss": 0.0053, "step": 13491 }, { "epoch": 24.8938134810711, "grad_norm": 0.2837573289871216, "learning_rate": 2.8837000000000002e-05, "loss": 0.0093, "step": 13492 }, { "epoch": 24.89566020313943, "grad_norm": 0.12989038228988647, "learning_rate": 2.8836666666666668e-05, "loss": 0.0037, "step": 13493 }, { "epoch": 24.897506925207757, "grad_norm": 0.22318127751350403, "learning_rate": 2.8836333333333334e-05, "loss": 0.0057, "step": 13494 }, { "epoch": 24.899353647276087, "grad_norm": 0.30415669083595276, "learning_rate": 2.8836000000000003e-05, "loss": 0.01, "step": 13495 }, { "epoch": 24.901200369344412, "grad_norm": 0.11953115463256836, "learning_rate": 2.8835666666666666e-05, "loss": 0.0038, "step": 13496 }, { "epoch": 24.90304709141274, "grad_norm": 0.3473064601421356, "learning_rate": 2.8835333333333335e-05, "loss": 0.0108, "step": 13497 }, { "epoch": 24.90489381348107, "grad_norm": 0.30331888794898987, "learning_rate": 2.8834999999999998e-05, "loss": 0.0097, "step": 13498 }, { "epoch": 
24.9067405355494, "grad_norm": 0.4642525315284729, "learning_rate": 2.8834666666666667e-05, "loss": 0.013, "step": 13499 }, { "epoch": 24.90858725761773, "grad_norm": 0.23262901604175568, "learning_rate": 2.8834333333333336e-05, "loss": 0.0106, "step": 13500 }, { "epoch": 24.910433979686058, "grad_norm": 0.22102048993110657, "learning_rate": 2.8834e-05, "loss": 0.0037, "step": 13501 }, { "epoch": 24.912280701754387, "grad_norm": 0.44870537519454956, "learning_rate": 2.8833666666666668e-05, "loss": 0.0084, "step": 13502 }, { "epoch": 24.914127423822716, "grad_norm": 0.1932738572359085, "learning_rate": 2.8833333333333334e-05, "loss": 0.0049, "step": 13503 }, { "epoch": 24.91597414589104, "grad_norm": 0.3363226056098938, "learning_rate": 2.8833e-05, "loss": 0.0096, "step": 13504 }, { "epoch": 24.91782086795937, "grad_norm": 0.2664887309074402, "learning_rate": 2.8832666666666666e-05, "loss": 0.0088, "step": 13505 }, { "epoch": 24.9196675900277, "grad_norm": 0.34887585043907166, "learning_rate": 2.8832333333333335e-05, "loss": 0.0173, "step": 13506 }, { "epoch": 24.92151431209603, "grad_norm": 0.1695641726255417, "learning_rate": 2.8832e-05, "loss": 0.0062, "step": 13507 }, { "epoch": 24.92336103416436, "grad_norm": 0.09887981414794922, "learning_rate": 2.8831666666666667e-05, "loss": 0.0023, "step": 13508 }, { "epoch": 24.925207756232687, "grad_norm": 0.6092426180839539, "learning_rate": 2.8831333333333336e-05, "loss": 0.1358, "step": 13509 }, { "epoch": 24.927054478301017, "grad_norm": 0.3662014603614807, "learning_rate": 2.8831e-05, "loss": 0.0617, "step": 13510 }, { "epoch": 24.928901200369346, "grad_norm": 0.38646215200424194, "learning_rate": 2.8830666666666668e-05, "loss": 0.0661, "step": 13511 }, { "epoch": 24.930747922437675, "grad_norm": 0.6269811987876892, "learning_rate": 2.8830333333333334e-05, "loss": 0.1526, "step": 13512 }, { "epoch": 24.932594644506, "grad_norm": 0.3474038243293762, "learning_rate": 2.883e-05, "loss": 0.0508, "step": 13513 }, { 
"epoch": 24.93444136657433, "grad_norm": 0.3669436573982239, "learning_rate": 2.8829666666666666e-05, "loss": 0.0335, "step": 13514 }, { "epoch": 24.93628808864266, "grad_norm": 0.3087623119354248, "learning_rate": 2.8829333333333335e-05, "loss": 0.0312, "step": 13515 }, { "epoch": 24.938134810710988, "grad_norm": 0.4781001806259155, "learning_rate": 2.8829e-05, "loss": 0.0507, "step": 13516 }, { "epoch": 24.939981532779317, "grad_norm": 0.26677459478378296, "learning_rate": 2.8828666666666667e-05, "loss": 0.0321, "step": 13517 }, { "epoch": 24.941828254847646, "grad_norm": 0.3729875981807709, "learning_rate": 2.8828333333333337e-05, "loss": 0.0253, "step": 13518 }, { "epoch": 24.943674976915975, "grad_norm": 0.3847418427467346, "learning_rate": 2.8828e-05, "loss": 0.0217, "step": 13519 }, { "epoch": 24.945521698984304, "grad_norm": 0.3041211664676666, "learning_rate": 2.882766666666667e-05, "loss": 0.0143, "step": 13520 }, { "epoch": 24.94736842105263, "grad_norm": 0.6895516514778137, "learning_rate": 2.8827333333333334e-05, "loss": 0.0477, "step": 13521 }, { "epoch": 24.94921514312096, "grad_norm": 0.6736093163490295, "learning_rate": 2.8827e-05, "loss": 0.0163, "step": 13522 }, { "epoch": 24.95106186518929, "grad_norm": 0.15927226841449738, "learning_rate": 2.8826666666666666e-05, "loss": 0.0078, "step": 13523 }, { "epoch": 24.952908587257618, "grad_norm": 0.2903159558773041, "learning_rate": 2.8826333333333336e-05, "loss": 0.0118, "step": 13524 }, { "epoch": 24.954755309325947, "grad_norm": 0.23991265892982483, "learning_rate": 2.8826e-05, "loss": 0.0081, "step": 13525 }, { "epoch": 24.956602031394276, "grad_norm": 0.17719998955726624, "learning_rate": 2.8825666666666667e-05, "loss": 0.008, "step": 13526 }, { "epoch": 24.958448753462605, "grad_norm": 0.266058087348938, "learning_rate": 2.8825333333333333e-05, "loss": 0.0066, "step": 13527 }, { "epoch": 24.960295475530934, "grad_norm": 0.16486336290836334, "learning_rate": 2.8825e-05, "loss": 0.007, "step": 
13528 }, { "epoch": 24.96214219759926, "grad_norm": 0.10596636682748795, "learning_rate": 2.882466666666667e-05, "loss": 0.0033, "step": 13529 }, { "epoch": 24.96398891966759, "grad_norm": 0.43525850772857666, "learning_rate": 2.882433333333333e-05, "loss": 0.0086, "step": 13530 }, { "epoch": 24.965835641735918, "grad_norm": 0.17290306091308594, "learning_rate": 2.8824e-05, "loss": 0.008, "step": 13531 }, { "epoch": 24.967682363804247, "grad_norm": 0.3244077265262604, "learning_rate": 2.882366666666667e-05, "loss": 0.0079, "step": 13532 }, { "epoch": 24.969529085872576, "grad_norm": 0.16322195529937744, "learning_rate": 2.8823333333333332e-05, "loss": 0.0047, "step": 13533 }, { "epoch": 24.971375807940905, "grad_norm": 0.09850552678108215, "learning_rate": 2.8823000000000002e-05, "loss": 0.0026, "step": 13534 }, { "epoch": 24.973222530009235, "grad_norm": 0.17957203090190887, "learning_rate": 2.8822666666666668e-05, "loss": 0.008, "step": 13535 }, { "epoch": 24.975069252077564, "grad_norm": 0.42165297269821167, "learning_rate": 2.8822333333333334e-05, "loss": 0.0103, "step": 13536 }, { "epoch": 24.97691597414589, "grad_norm": 0.21143174171447754, "learning_rate": 2.8822e-05, "loss": 0.0082, "step": 13537 }, { "epoch": 24.97876269621422, "grad_norm": 0.1594984382390976, "learning_rate": 2.882166666666667e-05, "loss": 0.0063, "step": 13538 }, { "epoch": 24.980609418282548, "grad_norm": 0.13408170640468597, "learning_rate": 2.882133333333333e-05, "loss": 0.0041, "step": 13539 }, { "epoch": 24.982456140350877, "grad_norm": 0.19753006100654602, "learning_rate": 2.8821e-05, "loss": 0.0056, "step": 13540 }, { "epoch": 24.984302862419206, "grad_norm": 0.21510076522827148, "learning_rate": 2.882066666666667e-05, "loss": 0.0076, "step": 13541 }, { "epoch": 24.986149584487535, "grad_norm": 0.10175257921218872, "learning_rate": 2.8820333333333333e-05, "loss": 0.003, "step": 13542 }, { "epoch": 24.987996306555864, "grad_norm": 0.10908100754022598, "learning_rate": 
2.8820000000000002e-05, "loss": 0.0031, "step": 13543 }, { "epoch": 24.989843028624193, "grad_norm": 0.37116262316703796, "learning_rate": 2.8819666666666668e-05, "loss": 0.0077, "step": 13544 }, { "epoch": 24.991689750692522, "grad_norm": 0.13784131407737732, "learning_rate": 2.8819333333333334e-05, "loss": 0.0027, "step": 13545 }, { "epoch": 24.993536472760848, "grad_norm": 0.1495315283536911, "learning_rate": 2.8819e-05, "loss": 0.0063, "step": 13546 }, { "epoch": 24.995383194829177, "grad_norm": 0.17947818338871002, "learning_rate": 2.881866666666667e-05, "loss": 0.004, "step": 13547 }, { "epoch": 24.997229916897506, "grad_norm": 0.22032637894153595, "learning_rate": 2.881833333333333e-05, "loss": 0.0035, "step": 13548 }, { "epoch": 24.999076638965835, "grad_norm": 0.08717142790555954, "learning_rate": 2.8818e-05, "loss": 0.002, "step": 13549 }, { "epoch": 25.0, "grad_norm": 0.766228973865509, "learning_rate": 2.881766666666667e-05, "loss": 0.0297, "step": 13550 }, { "epoch": 25.00184672206833, "grad_norm": 0.502040445804596, "learning_rate": 2.8817333333333333e-05, "loss": 0.1689, "step": 13551 }, { "epoch": 25.00369344413666, "grad_norm": 0.41068142652511597, "learning_rate": 2.8817000000000002e-05, "loss": 0.0909, "step": 13552 }, { "epoch": 25.005540166204987, "grad_norm": 0.3879607319831848, "learning_rate": 2.8816666666666668e-05, "loss": 0.0564, "step": 13553 }, { "epoch": 25.007386888273317, "grad_norm": 0.31146150827407837, "learning_rate": 2.8816333333333334e-05, "loss": 0.0549, "step": 13554 }, { "epoch": 25.009233610341642, "grad_norm": 0.515511691570282, "learning_rate": 2.8816e-05, "loss": 0.0529, "step": 13555 }, { "epoch": 25.01108033240997, "grad_norm": 0.349189817905426, "learning_rate": 2.881566666666667e-05, "loss": 0.0903, "step": 13556 }, { "epoch": 25.0129270544783, "grad_norm": 0.5013755559921265, "learning_rate": 2.8815333333333335e-05, "loss": 0.0776, "step": 13557 }, { "epoch": 25.01477377654663, "grad_norm": 0.42771899700164795, 
"learning_rate": 2.8815e-05, "loss": 0.0319, "step": 13558 }, { "epoch": 25.01662049861496, "grad_norm": 0.22613854706287384, "learning_rate": 2.8814666666666667e-05, "loss": 0.0173, "step": 13559 }, { "epoch": 25.018467220683288, "grad_norm": 0.34730762243270874, "learning_rate": 2.8814333333333333e-05, "loss": 0.0725, "step": 13560 }, { "epoch": 25.020313942751617, "grad_norm": 0.24891577661037445, "learning_rate": 2.8814000000000002e-05, "loss": 0.0161, "step": 13561 }, { "epoch": 25.022160664819946, "grad_norm": 0.4423004388809204, "learning_rate": 2.8813666666666665e-05, "loss": 0.0184, "step": 13562 }, { "epoch": 25.02400738688827, "grad_norm": 0.2701072096824646, "learning_rate": 2.8813333333333334e-05, "loss": 0.0303, "step": 13563 }, { "epoch": 25.0258541089566, "grad_norm": 0.29532355070114136, "learning_rate": 2.8813e-05, "loss": 0.0128, "step": 13564 }, { "epoch": 25.02770083102493, "grad_norm": 0.27251678705215454, "learning_rate": 2.8812666666666666e-05, "loss": 0.0256, "step": 13565 }, { "epoch": 25.02954755309326, "grad_norm": 0.2066493034362793, "learning_rate": 2.8812333333333335e-05, "loss": 0.0235, "step": 13566 }, { "epoch": 25.03139427516159, "grad_norm": 0.12427642196416855, "learning_rate": 2.8812e-05, "loss": 0.003, "step": 13567 }, { "epoch": 25.033240997229917, "grad_norm": 0.3608299493789673, "learning_rate": 2.8811666666666667e-05, "loss": 0.0099, "step": 13568 }, { "epoch": 25.035087719298247, "grad_norm": 0.1806800216436386, "learning_rate": 2.8811333333333333e-05, "loss": 0.0179, "step": 13569 }, { "epoch": 25.036934441366576, "grad_norm": 0.5715934634208679, "learning_rate": 2.8811000000000002e-05, "loss": 0.0304, "step": 13570 }, { "epoch": 25.0387811634349, "grad_norm": 0.10905469208955765, "learning_rate": 2.8810666666666665e-05, "loss": 0.0012, "step": 13571 }, { "epoch": 25.04062788550323, "grad_norm": 0.15964914858341217, "learning_rate": 2.8810333333333334e-05, "loss": 0.0043, "step": 13572 }, { "epoch": 25.04247460757156, 
"grad_norm": 0.07477544993162155, "learning_rate": 2.881e-05, "loss": 0.002, "step": 13573 }, { "epoch": 25.04432132963989, "grad_norm": 0.11960326135158539, "learning_rate": 2.8809666666666666e-05, "loss": 0.0009, "step": 13574 }, { "epoch": 25.046168051708218, "grad_norm": 0.17168718576431274, "learning_rate": 2.8809333333333336e-05, "loss": 0.0036, "step": 13575 }, { "epoch": 25.048014773776547, "grad_norm": 0.14043472707271576, "learning_rate": 2.8809e-05, "loss": 0.0041, "step": 13576 }, { "epoch": 25.049861495844876, "grad_norm": 0.18809548020362854, "learning_rate": 2.8808666666666667e-05, "loss": 0.0031, "step": 13577 }, { "epoch": 25.051708217913205, "grad_norm": 0.16893936693668365, "learning_rate": 2.8808333333333333e-05, "loss": 0.0043, "step": 13578 }, { "epoch": 25.053554939981534, "grad_norm": 0.5226263403892517, "learning_rate": 2.8808000000000003e-05, "loss": 0.0116, "step": 13579 }, { "epoch": 25.05540166204986, "grad_norm": 0.7630420923233032, "learning_rate": 2.8807666666666665e-05, "loss": 0.0162, "step": 13580 }, { "epoch": 25.05724838411819, "grad_norm": 0.17915871739387512, "learning_rate": 2.8807333333333335e-05, "loss": 0.0222, "step": 13581 }, { "epoch": 25.05909510618652, "grad_norm": 0.30622854828834534, "learning_rate": 2.8807000000000004e-05, "loss": 0.0036, "step": 13582 }, { "epoch": 25.060941828254848, "grad_norm": 0.16288454830646515, "learning_rate": 2.8806666666666666e-05, "loss": 0.0027, "step": 13583 }, { "epoch": 25.062788550323177, "grad_norm": 0.41513580083847046, "learning_rate": 2.8806333333333336e-05, "loss": 0.0124, "step": 13584 }, { "epoch": 25.064635272391506, "grad_norm": 0.28535616397857666, "learning_rate": 2.8806e-05, "loss": 0.0056, "step": 13585 }, { "epoch": 25.066481994459835, "grad_norm": 0.3623763620853424, "learning_rate": 2.8805666666666668e-05, "loss": 0.0071, "step": 13586 }, { "epoch": 25.068328716528164, "grad_norm": 0.14072564244270325, "learning_rate": 2.8805333333333334e-05, "loss": 0.0022, "step": 
13587 }, { "epoch": 25.07017543859649, "grad_norm": 0.3874894082546234, "learning_rate": 2.8805e-05, "loss": 0.0103, "step": 13588 }, { "epoch": 25.07202216066482, "grad_norm": 0.08016061782836914, "learning_rate": 2.8804666666666665e-05, "loss": 0.0011, "step": 13589 }, { "epoch": 25.073868882733148, "grad_norm": 0.22445538640022278, "learning_rate": 2.8804333333333335e-05, "loss": 0.0054, "step": 13590 }, { "epoch": 25.075715604801477, "grad_norm": 0.11151864379644394, "learning_rate": 2.8804e-05, "loss": 0.0022, "step": 13591 }, { "epoch": 25.077562326869806, "grad_norm": 0.2736489176750183, "learning_rate": 2.8803666666666667e-05, "loss": 0.0089, "step": 13592 }, { "epoch": 25.079409048938135, "grad_norm": 0.14825448393821716, "learning_rate": 2.8803333333333336e-05, "loss": 0.0024, "step": 13593 }, { "epoch": 25.081255771006465, "grad_norm": 0.2685433626174927, "learning_rate": 2.8803e-05, "loss": 0.0032, "step": 13594 }, { "epoch": 25.083102493074794, "grad_norm": 0.17419521510601044, "learning_rate": 2.8802666666666668e-05, "loss": 0.0026, "step": 13595 }, { "epoch": 25.08494921514312, "grad_norm": 0.2433479279279709, "learning_rate": 2.8802333333333334e-05, "loss": 0.0058, "step": 13596 }, { "epoch": 25.08679593721145, "grad_norm": 0.15277212858200073, "learning_rate": 2.8802e-05, "loss": 0.0035, "step": 13597 }, { "epoch": 25.088642659279778, "grad_norm": 0.20970101654529572, "learning_rate": 2.8801666666666666e-05, "loss": 0.0017, "step": 13598 }, { "epoch": 25.090489381348107, "grad_norm": 0.12431564927101135, "learning_rate": 2.8801333333333335e-05, "loss": 0.0021, "step": 13599 }, { "epoch": 25.092336103416436, "grad_norm": 0.5341439843177795, "learning_rate": 2.8801e-05, "loss": 0.0074, "step": 13600 }, { "epoch": 25.094182825484765, "grad_norm": 0.7216334342956543, "learning_rate": 2.8800666666666667e-05, "loss": 0.1557, "step": 13601 }, { "epoch": 25.096029547553094, "grad_norm": 0.6455937027931213, "learning_rate": 2.8800333333333336e-05, "loss": 
0.0688, "step": 13602 }, { "epoch": 25.097876269621423, "grad_norm": 0.36682724952697754, "learning_rate": 2.88e-05, "loss": 0.0601, "step": 13603 }, { "epoch": 25.099722991689752, "grad_norm": 0.42346009612083435, "learning_rate": 2.8799666666666668e-05, "loss": 0.0536, "step": 13604 }, { "epoch": 25.101569713758078, "grad_norm": 0.6256864666938782, "learning_rate": 2.8799333333333334e-05, "loss": 0.0467, "step": 13605 }, { "epoch": 25.103416435826407, "grad_norm": 0.35584160685539246, "learning_rate": 2.8799e-05, "loss": 0.0529, "step": 13606 }, { "epoch": 25.105263157894736, "grad_norm": 0.29889115691185, "learning_rate": 2.879866666666667e-05, "loss": 0.0317, "step": 13607 }, { "epoch": 25.107109879963065, "grad_norm": 0.37145760655403137, "learning_rate": 2.8798333333333335e-05, "loss": 0.0296, "step": 13608 }, { "epoch": 25.108956602031395, "grad_norm": 0.4296289086341858, "learning_rate": 2.8798e-05, "loss": 0.0252, "step": 13609 }, { "epoch": 25.110803324099724, "grad_norm": 0.3164059817790985, "learning_rate": 2.8797666666666667e-05, "loss": 0.0168, "step": 13610 }, { "epoch": 25.112650046168053, "grad_norm": 0.31333017349243164, "learning_rate": 2.8797333333333336e-05, "loss": 0.0176, "step": 13611 }, { "epoch": 25.114496768236382, "grad_norm": 0.31346407532691956, "learning_rate": 2.8797e-05, "loss": 0.0344, "step": 13612 }, { "epoch": 25.116343490304708, "grad_norm": 0.43452003598213196, "learning_rate": 2.8796666666666668e-05, "loss": 0.0455, "step": 13613 }, { "epoch": 25.118190212373037, "grad_norm": 0.3308866322040558, "learning_rate": 2.8796333333333334e-05, "loss": 0.0211, "step": 13614 }, { "epoch": 25.120036934441366, "grad_norm": 0.1883377730846405, "learning_rate": 2.8796e-05, "loss": 0.0078, "step": 13615 }, { "epoch": 25.121883656509695, "grad_norm": 0.15821512043476105, "learning_rate": 2.879566666666667e-05, "loss": 0.0049, "step": 13616 }, { "epoch": 25.123730378578024, "grad_norm": 0.21383026242256165, "learning_rate": 
2.8795333333333332e-05, "loss": 0.0052, "step": 13617 }, { "epoch": 25.125577100646353, "grad_norm": 0.2946529984474182, "learning_rate": 2.8795e-05, "loss": 0.0084, "step": 13618 }, { "epoch": 25.127423822714682, "grad_norm": 0.5541951656341553, "learning_rate": 2.8794666666666667e-05, "loss": 0.0091, "step": 13619 }, { "epoch": 25.12927054478301, "grad_norm": 0.1361600160598755, "learning_rate": 2.8794333333333333e-05, "loss": 0.0047, "step": 13620 }, { "epoch": 25.131117266851337, "grad_norm": 0.18995541334152222, "learning_rate": 2.8794e-05, "loss": 0.0068, "step": 13621 }, { "epoch": 25.132963988919666, "grad_norm": 0.15388409793376923, "learning_rate": 2.879366666666667e-05, "loss": 0.0061, "step": 13622 }, { "epoch": 25.134810710987995, "grad_norm": 0.29169967770576477, "learning_rate": 2.879333333333333e-05, "loss": 0.0126, "step": 13623 }, { "epoch": 25.136657433056325, "grad_norm": 0.8038426041603088, "learning_rate": 2.8793e-05, "loss": 0.0102, "step": 13624 }, { "epoch": 25.138504155124654, "grad_norm": 0.24247054755687714, "learning_rate": 2.879266666666667e-05, "loss": 0.0061, "step": 13625 }, { "epoch": 25.140350877192983, "grad_norm": 1.249650001525879, "learning_rate": 2.8792333333333332e-05, "loss": 0.0122, "step": 13626 }, { "epoch": 25.142197599261312, "grad_norm": 0.27134811878204346, "learning_rate": 2.8792e-05, "loss": 0.0089, "step": 13627 }, { "epoch": 25.14404432132964, "grad_norm": 0.33459794521331787, "learning_rate": 2.8791666666666667e-05, "loss": 0.0147, "step": 13628 }, { "epoch": 25.14589104339797, "grad_norm": 0.42010146379470825, "learning_rate": 2.8791333333333333e-05, "loss": 0.0053, "step": 13629 }, { "epoch": 25.147737765466296, "grad_norm": 0.16870661079883575, "learning_rate": 2.8791e-05, "loss": 0.0063, "step": 13630 }, { "epoch": 25.149584487534625, "grad_norm": 0.2020006626844406, "learning_rate": 2.879066666666667e-05, "loss": 0.0045, "step": 13631 }, { "epoch": 25.151431209602954, "grad_norm": 0.273642361164093, 
"learning_rate": 2.8790333333333335e-05, "loss": 0.0271, "step": 13632 }, { "epoch": 25.153277931671283, "grad_norm": 0.32549455761909485, "learning_rate": 2.879e-05, "loss": 0.0083, "step": 13633 }, { "epoch": 25.155124653739612, "grad_norm": 0.3531877398490906, "learning_rate": 2.878966666666667e-05, "loss": 0.0069, "step": 13634 }, { "epoch": 25.15697137580794, "grad_norm": 0.16902898252010345, "learning_rate": 2.8789333333333332e-05, "loss": 0.0033, "step": 13635 }, { "epoch": 25.15881809787627, "grad_norm": 0.20311951637268066, "learning_rate": 2.8789e-05, "loss": 0.0038, "step": 13636 }, { "epoch": 25.1606648199446, "grad_norm": 0.15935593843460083, "learning_rate": 2.8788666666666668e-05, "loss": 0.0042, "step": 13637 }, { "epoch": 25.162511542012926, "grad_norm": 0.642253041267395, "learning_rate": 2.8788333333333334e-05, "loss": 0.0058, "step": 13638 }, { "epoch": 25.164358264081255, "grad_norm": 0.20698927342891693, "learning_rate": 2.8788e-05, "loss": 0.0045, "step": 13639 }, { "epoch": 25.166204986149584, "grad_norm": 0.16226263344287872, "learning_rate": 2.878766666666667e-05, "loss": 0.0027, "step": 13640 }, { "epoch": 25.168051708217913, "grad_norm": 0.19789494574069977, "learning_rate": 2.8787333333333335e-05, "loss": 0.0033, "step": 13641 }, { "epoch": 25.169898430286242, "grad_norm": 0.4175894856452942, "learning_rate": 2.8787e-05, "loss": 0.0059, "step": 13642 }, { "epoch": 25.17174515235457, "grad_norm": 0.25816839933395386, "learning_rate": 2.878666666666667e-05, "loss": 0.0048, "step": 13643 }, { "epoch": 25.1735918744229, "grad_norm": 0.24436330795288086, "learning_rate": 2.8786333333333333e-05, "loss": 0.006, "step": 13644 }, { "epoch": 25.17543859649123, "grad_norm": 0.26902177929878235, "learning_rate": 2.8786000000000002e-05, "loss": 0.0044, "step": 13645 }, { "epoch": 25.177285318559555, "grad_norm": 0.6639602184295654, "learning_rate": 2.8785666666666668e-05, "loss": 0.0166, "step": 13646 }, { "epoch": 25.179132040627884, "grad_norm": 
0.05455506592988968, "learning_rate": 2.8785333333333334e-05, "loss": 0.0012, "step": 13647 }, { "epoch": 25.180978762696213, "grad_norm": 0.29231777787208557, "learning_rate": 2.8785e-05, "loss": 0.0068, "step": 13648 }, { "epoch": 25.182825484764543, "grad_norm": 0.08994735032320023, "learning_rate": 2.8784666666666666e-05, "loss": 0.0021, "step": 13649 }, { "epoch": 25.18467220683287, "grad_norm": 0.4141843318939209, "learning_rate": 2.8784333333333335e-05, "loss": 0.0105, "step": 13650 }, { "epoch": 25.1865189289012, "grad_norm": 0.45748546719551086, "learning_rate": 2.8784e-05, "loss": 0.1008, "step": 13651 }, { "epoch": 25.18836565096953, "grad_norm": 0.4383915662765503, "learning_rate": 2.8783666666666667e-05, "loss": 0.1024, "step": 13652 }, { "epoch": 25.19021237303786, "grad_norm": 0.5043680667877197, "learning_rate": 2.8783333333333333e-05, "loss": 0.1288, "step": 13653 }, { "epoch": 25.19205909510619, "grad_norm": 0.44734859466552734, "learning_rate": 2.8783000000000002e-05, "loss": 0.0724, "step": 13654 }, { "epoch": 25.193905817174514, "grad_norm": 0.7525545358657837, "learning_rate": 2.8782666666666665e-05, "loss": 0.0877, "step": 13655 }, { "epoch": 25.195752539242843, "grad_norm": 0.43058881163597107, "learning_rate": 2.8782333333333334e-05, "loss": 0.0608, "step": 13656 }, { "epoch": 25.197599261311172, "grad_norm": 0.6278600096702576, "learning_rate": 2.8782000000000003e-05, "loss": 0.0535, "step": 13657 }, { "epoch": 25.1994459833795, "grad_norm": 0.40241897106170654, "learning_rate": 2.8781666666666666e-05, "loss": 0.0471, "step": 13658 }, { "epoch": 25.20129270544783, "grad_norm": 0.35044482350349426, "learning_rate": 2.8781333333333335e-05, "loss": 0.0289, "step": 13659 }, { "epoch": 25.20313942751616, "grad_norm": 0.555040717124939, "learning_rate": 2.8781e-05, "loss": 0.0781, "step": 13660 }, { "epoch": 25.20498614958449, "grad_norm": 0.5007652640342712, "learning_rate": 2.8780666666666667e-05, "loss": 0.0326, "step": 13661 }, { "epoch": 
25.206832871652818, "grad_norm": 0.4233347475528717, "learning_rate": 2.8780333333333333e-05, "loss": 0.033, "step": 13662 }, { "epoch": 25.208679593721143, "grad_norm": 0.4547087252140045, "learning_rate": 2.8780000000000002e-05, "loss": 0.0214, "step": 13663 }, { "epoch": 25.210526315789473, "grad_norm": 0.31385135650634766, "learning_rate": 2.8779666666666665e-05, "loss": 0.016, "step": 13664 }, { "epoch": 25.2123730378578, "grad_norm": 0.7495867609977722, "learning_rate": 2.8779333333333334e-05, "loss": 0.0244, "step": 13665 }, { "epoch": 25.21421975992613, "grad_norm": 0.4695708453655243, "learning_rate": 2.8779000000000003e-05, "loss": 0.013, "step": 13666 }, { "epoch": 25.21606648199446, "grad_norm": 0.16245493292808533, "learning_rate": 2.8778666666666666e-05, "loss": 0.0057, "step": 13667 }, { "epoch": 25.21791320406279, "grad_norm": 0.29763901233673096, "learning_rate": 2.8778333333333335e-05, "loss": 0.0103, "step": 13668 }, { "epoch": 25.21975992613112, "grad_norm": 0.1941731572151184, "learning_rate": 2.8778e-05, "loss": 0.0107, "step": 13669 }, { "epoch": 25.221606648199447, "grad_norm": 0.3459717929363251, "learning_rate": 2.8777666666666667e-05, "loss": 0.0147, "step": 13670 }, { "epoch": 25.223453370267773, "grad_norm": 0.375107079744339, "learning_rate": 2.8777333333333333e-05, "loss": 0.0133, "step": 13671 }, { "epoch": 25.225300092336102, "grad_norm": 0.3817448616027832, "learning_rate": 2.8777000000000002e-05, "loss": 0.0231, "step": 13672 }, { "epoch": 25.22714681440443, "grad_norm": 0.2049887627363205, "learning_rate": 2.8776666666666665e-05, "loss": 0.002, "step": 13673 }, { "epoch": 25.22899353647276, "grad_norm": 0.3521776497364044, "learning_rate": 2.8776333333333334e-05, "loss": 0.0086, "step": 13674 }, { "epoch": 25.23084025854109, "grad_norm": 0.04273456335067749, "learning_rate": 2.8776000000000004e-05, "loss": 0.0011, "step": 13675 }, { "epoch": 25.23268698060942, "grad_norm": 0.2826165556907654, "learning_rate": 
2.8775666666666666e-05, "loss": 0.0071, "step": 13676 }, { "epoch": 25.234533702677748, "grad_norm": 0.17444173991680145, "learning_rate": 2.8775333333333336e-05, "loss": 0.0072, "step": 13677 }, { "epoch": 25.236380424746077, "grad_norm": 0.27987492084503174, "learning_rate": 2.8774999999999998e-05, "loss": 0.0048, "step": 13678 }, { "epoch": 25.238227146814406, "grad_norm": 0.35382506251335144, "learning_rate": 2.8774666666666667e-05, "loss": 0.0044, "step": 13679 }, { "epoch": 25.24007386888273, "grad_norm": 0.22350679337978363, "learning_rate": 2.8774333333333333e-05, "loss": 0.0063, "step": 13680 }, { "epoch": 25.24192059095106, "grad_norm": 0.2280522584915161, "learning_rate": 2.8774e-05, "loss": 0.0049, "step": 13681 }, { "epoch": 25.24376731301939, "grad_norm": 0.22186324000358582, "learning_rate": 2.877366666666667e-05, "loss": 0.0064, "step": 13682 }, { "epoch": 25.24561403508772, "grad_norm": 0.30040767788887024, "learning_rate": 2.8773333333333335e-05, "loss": 0.0096, "step": 13683 }, { "epoch": 25.24746075715605, "grad_norm": 0.12468577921390533, "learning_rate": 2.8773e-05, "loss": 0.0029, "step": 13684 }, { "epoch": 25.249307479224377, "grad_norm": 0.2888202667236328, "learning_rate": 2.8772666666666666e-05, "loss": 0.0055, "step": 13685 }, { "epoch": 25.251154201292707, "grad_norm": 0.49866822361946106, "learning_rate": 2.8772333333333336e-05, "loss": 0.0086, "step": 13686 }, { "epoch": 25.253000923361036, "grad_norm": 0.31418851017951965, "learning_rate": 2.8771999999999998e-05, "loss": 0.0069, "step": 13687 }, { "epoch": 25.25484764542936, "grad_norm": 0.49223172664642334, "learning_rate": 2.8771666666666668e-05, "loss": 0.0102, "step": 13688 }, { "epoch": 25.25669436749769, "grad_norm": 0.16645334661006927, "learning_rate": 2.8771333333333334e-05, "loss": 0.0056, "step": 13689 }, { "epoch": 25.25854108956602, "grad_norm": 0.23253782093524933, "learning_rate": 2.8771e-05, "loss": 0.0042, "step": 13690 }, { "epoch": 25.26038781163435, "grad_norm": 
0.17528776824474335, "learning_rate": 2.877066666666667e-05, "loss": 0.005, "step": 13691 }, { "epoch": 25.262234533702678, "grad_norm": 0.1108769103884697, "learning_rate": 2.8770333333333335e-05, "loss": 0.0033, "step": 13692 }, { "epoch": 25.264081255771007, "grad_norm": 0.32757946848869324, "learning_rate": 2.877e-05, "loss": 0.0039, "step": 13693 }, { "epoch": 25.265927977839336, "grad_norm": 0.29985570907592773, "learning_rate": 2.8769666666666667e-05, "loss": 0.0072, "step": 13694 }, { "epoch": 25.267774699907665, "grad_norm": 0.16443781554698944, "learning_rate": 2.8769333333333336e-05, "loss": 0.0048, "step": 13695 }, { "epoch": 25.26962142197599, "grad_norm": 0.15562738478183746, "learning_rate": 2.8769e-05, "loss": 0.003, "step": 13696 }, { "epoch": 25.27146814404432, "grad_norm": 0.29432448744773865, "learning_rate": 2.8768666666666668e-05, "loss": 0.0039, "step": 13697 }, { "epoch": 25.27331486611265, "grad_norm": 0.265778511762619, "learning_rate": 2.8768333333333334e-05, "loss": 0.0178, "step": 13698 }, { "epoch": 25.27516158818098, "grad_norm": 0.32704463601112366, "learning_rate": 2.8768e-05, "loss": 0.0085, "step": 13699 }, { "epoch": 25.277008310249307, "grad_norm": 0.0786413848400116, "learning_rate": 2.876766666666667e-05, "loss": 0.0009, "step": 13700 }, { "epoch": 25.278855032317637, "grad_norm": 0.43745529651641846, "learning_rate": 2.8767333333333335e-05, "loss": 0.1118, "step": 13701 }, { "epoch": 25.280701754385966, "grad_norm": 0.6298274993896484, "learning_rate": 2.8767e-05, "loss": 0.0922, "step": 13702 }, { "epoch": 25.282548476454295, "grad_norm": 0.3927988111972809, "learning_rate": 2.8766666666666667e-05, "loss": 0.076, "step": 13703 }, { "epoch": 25.284395198522624, "grad_norm": 0.34882792830467224, "learning_rate": 2.8766333333333336e-05, "loss": 0.055, "step": 13704 }, { "epoch": 25.28624192059095, "grad_norm": 0.31682804226875305, "learning_rate": 2.8766e-05, "loss": 0.0384, "step": 13705 }, { "epoch": 25.28808864265928, 
"grad_norm": 0.4535244405269623, "learning_rate": 2.8765666666666668e-05, "loss": 0.0295, "step": 13706 }, { "epoch": 25.289935364727608, "grad_norm": 0.4537026584148407, "learning_rate": 2.8765333333333337e-05, "loss": 0.0274, "step": 13707 }, { "epoch": 25.291782086795937, "grad_norm": 0.30101144313812256, "learning_rate": 2.8765e-05, "loss": 0.0326, "step": 13708 }, { "epoch": 25.293628808864266, "grad_norm": 0.6969102621078491, "learning_rate": 2.876466666666667e-05, "loss": 0.0912, "step": 13709 }, { "epoch": 25.295475530932595, "grad_norm": 0.3315484821796417, "learning_rate": 2.8764333333333332e-05, "loss": 0.0395, "step": 13710 }, { "epoch": 25.297322253000925, "grad_norm": 0.2811791002750397, "learning_rate": 2.8764e-05, "loss": 0.0426, "step": 13711 }, { "epoch": 25.299168975069254, "grad_norm": 0.4676089584827423, "learning_rate": 2.8763666666666667e-05, "loss": 0.0153, "step": 13712 }, { "epoch": 25.30101569713758, "grad_norm": 0.5364266633987427, "learning_rate": 2.8763333333333333e-05, "loss": 0.0456, "step": 13713 }, { "epoch": 25.30286241920591, "grad_norm": 0.25764209032058716, "learning_rate": 2.8763e-05, "loss": 0.0225, "step": 13714 }, { "epoch": 25.304709141274238, "grad_norm": 0.4249553680419922, "learning_rate": 2.8762666666666668e-05, "loss": 0.0166, "step": 13715 }, { "epoch": 25.306555863342567, "grad_norm": 0.3083512783050537, "learning_rate": 2.8762333333333334e-05, "loss": 0.0352, "step": 13716 }, { "epoch": 25.308402585410896, "grad_norm": 0.2641586363315582, "learning_rate": 2.8762e-05, "loss": 0.0079, "step": 13717 }, { "epoch": 25.310249307479225, "grad_norm": 0.2892804443836212, "learning_rate": 2.876166666666667e-05, "loss": 0.0073, "step": 13718 }, { "epoch": 25.312096029547554, "grad_norm": 0.3115205466747284, "learning_rate": 2.8761333333333332e-05, "loss": 0.0058, "step": 13719 }, { "epoch": 25.313942751615883, "grad_norm": 0.6394883990287781, "learning_rate": 2.8761e-05, "loss": 0.0246, "step": 13720 }, { "epoch": 
25.31578947368421, "grad_norm": 0.29537880420684814, "learning_rate": 2.8760666666666667e-05, "loss": 0.0052, "step": 13721 }, { "epoch": 25.317636195752538, "grad_norm": 0.39419668912887573, "learning_rate": 2.8760333333333333e-05, "loss": 0.0063, "step": 13722 }, { "epoch": 25.319482917820867, "grad_norm": 0.11710794270038605, "learning_rate": 2.876e-05, "loss": 0.0033, "step": 13723 }, { "epoch": 25.321329639889196, "grad_norm": 0.20329396426677704, "learning_rate": 2.875966666666667e-05, "loss": 0.0032, "step": 13724 }, { "epoch": 25.323176361957525, "grad_norm": 0.1527843326330185, "learning_rate": 2.8759333333333334e-05, "loss": 0.0025, "step": 13725 }, { "epoch": 25.325023084025855, "grad_norm": 0.16141752898693085, "learning_rate": 2.8759e-05, "loss": 0.0057, "step": 13726 }, { "epoch": 25.326869806094184, "grad_norm": 0.2185315191745758, "learning_rate": 2.875866666666667e-05, "loss": 0.008, "step": 13727 }, { "epoch": 25.328716528162513, "grad_norm": 0.23512014746665955, "learning_rate": 2.8758333333333332e-05, "loss": 0.0059, "step": 13728 }, { "epoch": 25.330563250230842, "grad_norm": 0.18767587840557098, "learning_rate": 2.8758e-05, "loss": 0.0064, "step": 13729 }, { "epoch": 25.332409972299168, "grad_norm": 0.24704614281654358, "learning_rate": 2.8757666666666667e-05, "loss": 0.0064, "step": 13730 }, { "epoch": 25.334256694367497, "grad_norm": 0.7131418585777283, "learning_rate": 2.8757333333333333e-05, "loss": 0.0059, "step": 13731 }, { "epoch": 25.336103416435826, "grad_norm": 0.20158667862415314, "learning_rate": 2.8757000000000003e-05, "loss": 0.0052, "step": 13732 }, { "epoch": 25.337950138504155, "grad_norm": 0.23540963232517242, "learning_rate": 2.875666666666667e-05, "loss": 0.0063, "step": 13733 }, { "epoch": 25.339796860572484, "grad_norm": 0.15179704129695892, "learning_rate": 2.8756333333333335e-05, "loss": 0.004, "step": 13734 }, { "epoch": 25.341643582640813, "grad_norm": 0.3278792202472687, "learning_rate": 2.8756e-05, "loss": 0.0057, 
"step": 13735 }, { "epoch": 25.343490304709142, "grad_norm": 0.2591707408428192, "learning_rate": 2.875566666666667e-05, "loss": 0.0204, "step": 13736 }, { "epoch": 25.34533702677747, "grad_norm": 0.1418246179819107, "learning_rate": 2.8755333333333332e-05, "loss": 0.0036, "step": 13737 }, { "epoch": 25.347183748845797, "grad_norm": 0.16631825268268585, "learning_rate": 2.8755e-05, "loss": 0.0046, "step": 13738 }, { "epoch": 25.349030470914126, "grad_norm": 0.06749407947063446, "learning_rate": 2.8754666666666664e-05, "loss": 0.0022, "step": 13739 }, { "epoch": 25.350877192982455, "grad_norm": 0.07478956133127213, "learning_rate": 2.8754333333333334e-05, "loss": 0.0022, "step": 13740 }, { "epoch": 25.352723915050785, "grad_norm": 0.5097907185554504, "learning_rate": 2.8754000000000003e-05, "loss": 0.0154, "step": 13741 }, { "epoch": 25.354570637119114, "grad_norm": 0.1690087914466858, "learning_rate": 2.8753666666666665e-05, "loss": 0.0044, "step": 13742 }, { "epoch": 25.356417359187443, "grad_norm": 0.2194225937128067, "learning_rate": 2.8753333333333335e-05, "loss": 0.007, "step": 13743 }, { "epoch": 25.358264081255772, "grad_norm": 0.14810651540756226, "learning_rate": 2.8753e-05, "loss": 0.0057, "step": 13744 }, { "epoch": 25.3601108033241, "grad_norm": 0.21244044601917267, "learning_rate": 2.8752666666666667e-05, "loss": 0.0078, "step": 13745 }, { "epoch": 25.361957525392427, "grad_norm": 1.2006323337554932, "learning_rate": 2.8752333333333333e-05, "loss": 0.0092, "step": 13746 }, { "epoch": 25.363804247460756, "grad_norm": 0.177195206284523, "learning_rate": 2.8752000000000002e-05, "loss": 0.0025, "step": 13747 }, { "epoch": 25.365650969529085, "grad_norm": 0.3909979462623596, "learning_rate": 2.8751666666666664e-05, "loss": 0.0049, "step": 13748 }, { "epoch": 25.367497691597414, "grad_norm": 1.1116374731063843, "learning_rate": 2.8751333333333334e-05, "loss": 0.017, "step": 13749 }, { "epoch": 25.369344413665743, "grad_norm": 0.2975039780139923, 
"learning_rate": 2.8751000000000003e-05, "loss": 0.017, "step": 13750 }, { "epoch": 25.371191135734072, "grad_norm": null, "learning_rate": 2.8751000000000003e-05, "loss": 0.1609, "step": 13751 }, { "epoch": 25.3730378578024, "grad_norm": 0.5848435759544373, "learning_rate": 2.8750666666666666e-05, "loss": 0.1283, "step": 13752 }, { "epoch": 25.37488457987073, "grad_norm": 0.435305118560791, "learning_rate": 2.8750333333333335e-05, "loss": 0.0762, "step": 13753 }, { "epoch": 25.37673130193906, "grad_norm": 1.2225451469421387, "learning_rate": 2.875e-05, "loss": 0.0796, "step": 13754 }, { "epoch": 25.378578024007385, "grad_norm": 0.7846620678901672, "learning_rate": 2.8749666666666667e-05, "loss": 0.0573, "step": 13755 }, { "epoch": 25.380424746075715, "grad_norm": 0.41929861903190613, "learning_rate": 2.8749333333333333e-05, "loss": 0.055, "step": 13756 }, { "epoch": 25.382271468144044, "grad_norm": 0.5668908357620239, "learning_rate": 2.8749000000000002e-05, "loss": 0.0698, "step": 13757 }, { "epoch": 25.384118190212373, "grad_norm": 0.4807840883731842, "learning_rate": 2.8748666666666668e-05, "loss": 0.0487, "step": 13758 }, { "epoch": 25.385964912280702, "grad_norm": 0.503312885761261, "learning_rate": 2.8748333333333334e-05, "loss": 0.0667, "step": 13759 }, { "epoch": 25.38781163434903, "grad_norm": 0.6881386041641235, "learning_rate": 2.8748000000000003e-05, "loss": 0.0217, "step": 13760 }, { "epoch": 25.38965835641736, "grad_norm": 0.3284233510494232, "learning_rate": 2.8747666666666666e-05, "loss": 0.0201, "step": 13761 }, { "epoch": 25.39150507848569, "grad_norm": 0.49530842900276184, "learning_rate": 2.8747333333333335e-05, "loss": 0.0245, "step": 13762 }, { "epoch": 25.393351800554015, "grad_norm": 0.3061735928058624, "learning_rate": 2.8747e-05, "loss": 0.0183, "step": 13763 }, { "epoch": 25.395198522622344, "grad_norm": 0.29596859216690063, "learning_rate": 2.8746666666666667e-05, "loss": 0.03, "step": 13764 }, { "epoch": 25.397045244690673, "grad_norm":
0.27488064765930176, "learning_rate": 2.8746333333333333e-05, "loss": 0.0253, "step": 13765 }, { "epoch": 25.398891966759003, "grad_norm": 0.3477145731449127, "learning_rate": 2.8746000000000002e-05, "loss": 0.0144, "step": 13766 }, { "epoch": 25.40073868882733, "grad_norm": 0.319806843996048, "learning_rate": 2.8745666666666668e-05, "loss": 0.0111, "step": 13767 }, { "epoch": 25.40258541089566, "grad_norm": 0.3649977147579193, "learning_rate": 2.8745333333333334e-05, "loss": 0.022, "step": 13768 }, { "epoch": 25.40443213296399, "grad_norm": 0.11577984690666199, "learning_rate": 2.8745000000000003e-05, "loss": 0.0039, "step": 13769 }, { "epoch": 25.40627885503232, "grad_norm": 0.1643013060092926, "learning_rate": 2.8744666666666666e-05, "loss": 0.0048, "step": 13770 }, { "epoch": 25.408125577100645, "grad_norm": 0.25984930992126465, "learning_rate": 2.8744333333333335e-05, "loss": 0.0058, "step": 13771 }, { "epoch": 25.409972299168974, "grad_norm": 0.37410005927085876, "learning_rate": 2.8743999999999998e-05, "loss": 0.0093, "step": 13772 }, { "epoch": 25.411819021237303, "grad_norm": 0.5347874164581299, "learning_rate": 2.8743666666666667e-05, "loss": 0.0103, "step": 13773 }, { "epoch": 25.413665743305632, "grad_norm": 0.30911314487457275, "learning_rate": 2.8743333333333333e-05, "loss": 0.0097, "step": 13774 }, { "epoch": 25.41551246537396, "grad_norm": 0.2698220908641815, "learning_rate": 2.8743e-05, "loss": 0.006, "step": 13775 }, { "epoch": 25.41735918744229, "grad_norm": 0.3670751750469208, "learning_rate": 2.874266666666667e-05, "loss": 0.0166, "step": 13776 }, { "epoch": 25.41920590951062, "grad_norm": 0.31863951683044434, "learning_rate": 2.8742333333333334e-05, "loss": 0.0046, "step": 13777 }, { "epoch": 25.42105263157895, "grad_norm": 0.3697512447834015, "learning_rate": 2.8742e-05, "loss": 0.0069, "step": 13778 }, { "epoch": 25.422899353647278, "grad_norm": 0.1975865662097931, "learning_rate": 2.8741666666666666e-05, "loss": 0.0191, "step": 13779 }, { 
"epoch": 25.424746075715603, "grad_norm": 0.189303457736969, "learning_rate": 2.8741333333333336e-05, "loss": 0.003, "step": 13780 }, { "epoch": 25.426592797783933, "grad_norm": 0.747004508972168, "learning_rate": 2.8740999999999998e-05, "loss": 0.0108, "step": 13781 }, { "epoch": 25.42843951985226, "grad_norm": 0.07769419997930527, "learning_rate": 2.8740666666666667e-05, "loss": 0.0022, "step": 13782 }, { "epoch": 25.43028624192059, "grad_norm": 0.15571707487106323, "learning_rate": 2.8740333333333337e-05, "loss": 0.0038, "step": 13783 }, { "epoch": 25.43213296398892, "grad_norm": 0.06311183422803879, "learning_rate": 2.874e-05, "loss": 0.0021, "step": 13784 }, { "epoch": 25.43397968605725, "grad_norm": 0.12324243038892746, "learning_rate": 2.873966666666667e-05, "loss": 0.0031, "step": 13785 }, { "epoch": 25.43582640812558, "grad_norm": 1.1392844915390015, "learning_rate": 2.8739333333333335e-05, "loss": 0.0103, "step": 13786 }, { "epoch": 25.437673130193907, "grad_norm": 0.33735352754592896, "learning_rate": 2.8739e-05, "loss": 0.0333, "step": 13787 }, { "epoch": 25.439519852262233, "grad_norm": 0.18859614431858063, "learning_rate": 2.8738666666666666e-05, "loss": 0.0037, "step": 13788 }, { "epoch": 25.441366574330562, "grad_norm": 0.4658453166484833, "learning_rate": 2.8738333333333336e-05, "loss": 0.0044, "step": 13789 }, { "epoch": 25.44321329639889, "grad_norm": 0.29348111152648926, "learning_rate": 2.8737999999999998e-05, "loss": 0.0084, "step": 13790 }, { "epoch": 25.44506001846722, "grad_norm": 0.17754070460796356, "learning_rate": 2.8737666666666668e-05, "loss": 0.0055, "step": 13791 }, { "epoch": 25.44690674053555, "grad_norm": 0.2359544187784195, "learning_rate": 2.8737333333333337e-05, "loss": 0.0039, "step": 13792 }, { "epoch": 25.44875346260388, "grad_norm": 0.11587945371866226, "learning_rate": 2.8737e-05, "loss": 0.0019, "step": 13793 }, { "epoch": 25.450600184672208, "grad_norm": 0.3465985357761383, "learning_rate": 2.873666666666667e-05, 
"loss": 0.0096, "step": 13794 }, { "epoch": 25.452446906740537, "grad_norm": 0.31631413102149963, "learning_rate": 2.8736333333333335e-05, "loss": 0.0063, "step": 13795 }, { "epoch": 25.454293628808863, "grad_norm": 0.4622824788093567, "learning_rate": 2.8736e-05, "loss": 0.0081, "step": 13796 }, { "epoch": 25.45614035087719, "grad_norm": 0.1349441111087799, "learning_rate": 2.8735666666666667e-05, "loss": 0.0029, "step": 13797 }, { "epoch": 25.45798707294552, "grad_norm": 0.5047300457954407, "learning_rate": 2.8735333333333336e-05, "loss": 0.0045, "step": 13798 }, { "epoch": 25.45983379501385, "grad_norm": 0.4179021716117859, "learning_rate": 2.8735e-05, "loss": 0.0098, "step": 13799 }, { "epoch": 25.46168051708218, "grad_norm": 0.23024718463420868, "learning_rate": 2.8734666666666668e-05, "loss": 0.0035, "step": 13800 }, { "epoch": 25.46352723915051, "grad_norm": 0.7658423781394958, "learning_rate": 2.8734333333333334e-05, "loss": 0.1122, "step": 13801 }, { "epoch": 25.465373961218837, "grad_norm": 0.4380490183830261, "learning_rate": 2.8734e-05, "loss": 0.1327, "step": 13802 }, { "epoch": 25.467220683287167, "grad_norm": 0.5566225647926331, "learning_rate": 2.873366666666667e-05, "loss": 0.0584, "step": 13803 }, { "epoch": 25.469067405355496, "grad_norm": 1.2810173034667969, "learning_rate": 2.873333333333333e-05, "loss": 0.102, "step": 13804 }, { "epoch": 25.47091412742382, "grad_norm": 0.5459073781967163, "learning_rate": 2.8733e-05, "loss": 0.0893, "step": 13805 }, { "epoch": 25.47276084949215, "grad_norm": 0.4217475652694702, "learning_rate": 2.8732666666666667e-05, "loss": 0.0529, "step": 13806 }, { "epoch": 25.47460757156048, "grad_norm": 0.3302052617073059, "learning_rate": 2.8732333333333333e-05, "loss": 0.0353, "step": 13807 }, { "epoch": 25.47645429362881, "grad_norm": 0.39502203464508057, "learning_rate": 2.8732000000000002e-05, "loss": 0.053, "step": 13808 }, { "epoch": 25.478301015697138, "grad_norm": 0.3716045618057251, "learning_rate": 
2.8731666666666668e-05, "loss": 0.0433, "step": 13809 }, { "epoch": 25.480147737765467, "grad_norm": 0.30203694105148315, "learning_rate": 2.8731333333333334e-05, "loss": 0.025, "step": 13810 }, { "epoch": 25.481994459833796, "grad_norm": 0.621515691280365, "learning_rate": 2.8731e-05, "loss": 0.0464, "step": 13811 }, { "epoch": 25.483841181902125, "grad_norm": 0.2422146201133728, "learning_rate": 2.873066666666667e-05, "loss": 0.013, "step": 13812 }, { "epoch": 25.48568790397045, "grad_norm": 0.3242398500442505, "learning_rate": 2.8730333333333332e-05, "loss": 0.0166, "step": 13813 }, { "epoch": 25.48753462603878, "grad_norm": 0.5535221099853516, "learning_rate": 2.873e-05, "loss": 0.0324, "step": 13814 }, { "epoch": 25.48938134810711, "grad_norm": 0.19889982044696808, "learning_rate": 2.8729666666666667e-05, "loss": 0.0244, "step": 13815 }, { "epoch": 25.49122807017544, "grad_norm": 0.14758522808551788, "learning_rate": 2.8729333333333333e-05, "loss": 0.0065, "step": 13816 }, { "epoch": 25.493074792243767, "grad_norm": 0.207974374294281, "learning_rate": 2.8729000000000002e-05, "loss": 0.0082, "step": 13817 }, { "epoch": 25.494921514312097, "grad_norm": 0.2960265278816223, "learning_rate": 2.8728666666666668e-05, "loss": 0.0108, "step": 13818 }, { "epoch": 25.496768236380426, "grad_norm": 0.48034244775772095, "learning_rate": 2.8728333333333334e-05, "loss": 0.0098, "step": 13819 }, { "epoch": 25.498614958448755, "grad_norm": 0.2875449061393738, "learning_rate": 2.8728e-05, "loss": 0.0097, "step": 13820 }, { "epoch": 25.50046168051708, "grad_norm": 0.2575296461582184, "learning_rate": 2.872766666666667e-05, "loss": 0.0059, "step": 13821 }, { "epoch": 25.50230840258541, "grad_norm": 0.19673795998096466, "learning_rate": 2.8727333333333332e-05, "loss": 0.0048, "step": 13822 }, { "epoch": 25.50415512465374, "grad_norm": 0.6091803908348083, "learning_rate": 2.8727e-05, "loss": 0.005, "step": 13823 }, { "epoch": 25.506001846722068, "grad_norm": 0.26289331912994385, 
"learning_rate": 2.8726666666666667e-05, "loss": 0.0073, "step": 13824 }, { "epoch": 25.507848568790397, "grad_norm": 0.2928977310657501, "learning_rate": 2.8726333333333333e-05, "loss": 0.0194, "step": 13825 }, { "epoch": 25.509695290858726, "grad_norm": 0.1658514142036438, "learning_rate": 2.8726000000000002e-05, "loss": 0.0038, "step": 13826 }, { "epoch": 25.511542012927055, "grad_norm": 0.5831472277641296, "learning_rate": 2.872566666666667e-05, "loss": 0.011, "step": 13827 }, { "epoch": 25.513388734995385, "grad_norm": 0.22511175274848938, "learning_rate": 2.8725333333333334e-05, "loss": 0.0051, "step": 13828 }, { "epoch": 25.51523545706371, "grad_norm": 0.2004740834236145, "learning_rate": 2.8725e-05, "loss": 0.0034, "step": 13829 }, { "epoch": 25.51708217913204, "grad_norm": 0.4446840286254883, "learning_rate": 2.8724666666666666e-05, "loss": 0.0075, "step": 13830 }, { "epoch": 25.51892890120037, "grad_norm": 0.45033150911331177, "learning_rate": 2.8724333333333332e-05, "loss": 0.0082, "step": 13831 }, { "epoch": 25.520775623268698, "grad_norm": 0.17718258500099182, "learning_rate": 2.8724e-05, "loss": 0.0063, "step": 13832 }, { "epoch": 25.522622345337027, "grad_norm": 0.17210236191749573, "learning_rate": 2.8723666666666667e-05, "loss": 0.0042, "step": 13833 }, { "epoch": 25.524469067405356, "grad_norm": 0.43359464406967163, "learning_rate": 2.8723333333333333e-05, "loss": 0.0059, "step": 13834 }, { "epoch": 25.526315789473685, "grad_norm": 0.3382089138031006, "learning_rate": 2.8723000000000003e-05, "loss": 0.0094, "step": 13835 }, { "epoch": 25.528162511542014, "grad_norm": 0.7305532693862915, "learning_rate": 2.8722666666666665e-05, "loss": 0.0186, "step": 13836 }, { "epoch": 25.530009233610343, "grad_norm": 0.691562831401825, "learning_rate": 2.8722333333333335e-05, "loss": 0.0353, "step": 13837 }, { "epoch": 25.53185595567867, "grad_norm": 0.22980351746082306, "learning_rate": 2.8722e-05, "loss": 0.0069, "step": 13838 }, { "epoch": 25.533702677746998, 
"grad_norm": 1.1313012838363647, "learning_rate": 2.8721666666666666e-05, "loss": 0.0122, "step": 13839 }, { "epoch": 25.535549399815327, "grad_norm": 0.2683294713497162, "learning_rate": 2.8721333333333332e-05, "loss": 0.0052, "step": 13840 }, { "epoch": 25.537396121883656, "grad_norm": 0.4816579520702362, "learning_rate": 2.8721e-05, "loss": 0.0124, "step": 13841 }, { "epoch": 25.539242843951985, "grad_norm": 0.2540108859539032, "learning_rate": 2.8720666666666668e-05, "loss": 0.0043, "step": 13842 }, { "epoch": 25.541089566020315, "grad_norm": 0.16645418107509613, "learning_rate": 2.8720333333333334e-05, "loss": 0.0051, "step": 13843 }, { "epoch": 25.542936288088644, "grad_norm": 0.447847843170166, "learning_rate": 2.8720000000000003e-05, "loss": 0.0087, "step": 13844 }, { "epoch": 25.544783010156973, "grad_norm": 0.4923630952835083, "learning_rate": 2.8719666666666665e-05, "loss": 0.0082, "step": 13845 }, { "epoch": 25.5466297322253, "grad_norm": 0.19476549327373505, "learning_rate": 2.8719333333333335e-05, "loss": 0.0048, "step": 13846 }, { "epoch": 25.548476454293628, "grad_norm": 0.5130172371864319, "learning_rate": 2.8719e-05, "loss": 0.0075, "step": 13847 }, { "epoch": 25.550323176361957, "grad_norm": 0.8781978487968445, "learning_rate": 2.8718666666666667e-05, "loss": 0.0066, "step": 13848 }, { "epoch": 25.552169898430286, "grad_norm": 0.5883736610412598, "learning_rate": 2.8718333333333333e-05, "loss": 0.0145, "step": 13849 }, { "epoch": 25.554016620498615, "grad_norm": 0.3041222393512726, "learning_rate": 2.8718000000000002e-05, "loss": 0.0065, "step": 13850 }, { "epoch": 25.555863342566944, "grad_norm": 0.588570773601532, "learning_rate": 2.8717666666666668e-05, "loss": 0.1257, "step": 13851 }, { "epoch": 25.557710064635273, "grad_norm": 0.49004557728767395, "learning_rate": 2.8717333333333334e-05, "loss": 0.1183, "step": 13852 }, { "epoch": 25.559556786703602, "grad_norm": 0.5519627332687378, "learning_rate": 2.8717000000000003e-05, "loss": 0.0888, 
"step": 13853 }, { "epoch": 25.56140350877193, "grad_norm": 0.6542931795120239, "learning_rate": 2.8716666666666666e-05, "loss": 0.0951, "step": 13854 }, { "epoch": 25.563250230840257, "grad_norm": 0.5098896026611328, "learning_rate": 2.8716333333333335e-05, "loss": 0.0795, "step": 13855 }, { "epoch": 25.565096952908586, "grad_norm": 0.561345636844635, "learning_rate": 2.8716e-05, "loss": 0.0592, "step": 13856 }, { "epoch": 25.566943674976915, "grad_norm": 0.38035711646080017, "learning_rate": 2.8715666666666667e-05, "loss": 0.0378, "step": 13857 }, { "epoch": 25.568790397045245, "grad_norm": 0.4322569966316223, "learning_rate": 2.8715333333333336e-05, "loss": 0.0544, "step": 13858 }, { "epoch": 25.570637119113574, "grad_norm": 0.6671208143234253, "learning_rate": 2.8715000000000002e-05, "loss": 0.0436, "step": 13859 }, { "epoch": 25.572483841181903, "grad_norm": 0.3293718695640564, "learning_rate": 2.8714666666666668e-05, "loss": 0.0358, "step": 13860 }, { "epoch": 25.574330563250232, "grad_norm": 0.2848909795284271, "learning_rate": 2.8714333333333334e-05, "loss": 0.0175, "step": 13861 }, { "epoch": 25.57617728531856, "grad_norm": 0.2711280286312103, "learning_rate": 2.8714e-05, "loss": 0.0316, "step": 13862 }, { "epoch": 25.578024007386887, "grad_norm": 0.2882595360279083, "learning_rate": 2.8713666666666666e-05, "loss": 0.0186, "step": 13863 }, { "epoch": 25.579870729455216, "grad_norm": 0.5483098030090332, "learning_rate": 2.8713333333333335e-05, "loss": 0.0553, "step": 13864 }, { "epoch": 25.581717451523545, "grad_norm": 1.0351965427398682, "learning_rate": 2.8712999999999998e-05, "loss": 0.0106, "step": 13865 }, { "epoch": 25.583564173591874, "grad_norm": 0.8314506411552429, "learning_rate": 2.8712666666666667e-05, "loss": 0.0213, "step": 13866 }, { "epoch": 25.585410895660203, "grad_norm": 0.5011735558509827, "learning_rate": 2.8712333333333336e-05, "loss": 0.01, "step": 13867 }, { "epoch": 25.587257617728532, "grad_norm": 0.19918778538703918, 
"learning_rate": 2.8712e-05, "loss": 0.0073, "step": 13868 }, { "epoch": 25.58910433979686, "grad_norm": 0.44274699687957764, "learning_rate": 2.8711666666666668e-05, "loss": 0.0138, "step": 13869 }, { "epoch": 25.59095106186519, "grad_norm": 0.2641814947128296, "learning_rate": 2.8711333333333334e-05, "loss": 0.0344, "step": 13870 }, { "epoch": 25.592797783933516, "grad_norm": 0.2949162721633911, "learning_rate": 2.8711e-05, "loss": 0.0069, "step": 13871 }, { "epoch": 25.594644506001845, "grad_norm": 0.4576517641544342, "learning_rate": 2.8710666666666666e-05, "loss": 0.0179, "step": 13872 }, { "epoch": 25.596491228070175, "grad_norm": 0.3257240653038025, "learning_rate": 2.8710333333333335e-05, "loss": 0.0097, "step": 13873 }, { "epoch": 25.598337950138504, "grad_norm": 0.266026109457016, "learning_rate": 2.871e-05, "loss": 0.0045, "step": 13874 }, { "epoch": 25.600184672206833, "grad_norm": 0.28286173939704895, "learning_rate": 2.8709666666666667e-05, "loss": 0.0074, "step": 13875 }, { "epoch": 25.602031394275162, "grad_norm": 0.5088093280792236, "learning_rate": 2.8709333333333337e-05, "loss": 0.0041, "step": 13876 }, { "epoch": 25.60387811634349, "grad_norm": 0.25021037459373474, "learning_rate": 2.8709e-05, "loss": 0.005, "step": 13877 }, { "epoch": 25.60572483841182, "grad_norm": 0.24160067737102509, "learning_rate": 2.870866666666667e-05, "loss": 0.0035, "step": 13878 }, { "epoch": 25.607571560480146, "grad_norm": 0.34590643644332886, "learning_rate": 2.8708333333333334e-05, "loss": 0.0074, "step": 13879 }, { "epoch": 25.609418282548475, "grad_norm": 0.5404072403907776, "learning_rate": 2.8708e-05, "loss": 0.0101, "step": 13880 }, { "epoch": 25.611265004616804, "grad_norm": 0.1285506635904312, "learning_rate": 2.8707666666666666e-05, "loss": 0.0035, "step": 13881 }, { "epoch": 25.613111726685133, "grad_norm": 0.23897305130958557, "learning_rate": 2.8707333333333336e-05, "loss": 0.0072, "step": 13882 }, { "epoch": 25.614958448753463, "grad_norm": 
0.29718905687332153, "learning_rate": 2.8707e-05, "loss": 0.006, "step": 13883 }, { "epoch": 25.61680517082179, "grad_norm": 0.08984941989183426, "learning_rate": 2.8706666666666667e-05, "loss": 0.0024, "step": 13884 }, { "epoch": 25.61865189289012, "grad_norm": 0.223371222615242, "learning_rate": 2.8706333333333337e-05, "loss": 0.0074, "step": 13885 }, { "epoch": 25.62049861495845, "grad_norm": 0.16034305095672607, "learning_rate": 2.8706e-05, "loss": 0.0079, "step": 13886 }, { "epoch": 25.62234533702678, "grad_norm": 0.2069733887910843, "learning_rate": 2.870566666666667e-05, "loss": 0.0039, "step": 13887 }, { "epoch": 25.624192059095105, "grad_norm": 0.3699100613594055, "learning_rate": 2.8705333333333335e-05, "loss": 0.0145, "step": 13888 }, { "epoch": 25.626038781163434, "grad_norm": 0.5772353410720825, "learning_rate": 2.8705e-05, "loss": 0.0065, "step": 13889 }, { "epoch": 25.627885503231763, "grad_norm": 0.0805864930152893, "learning_rate": 2.8704666666666666e-05, "loss": 0.002, "step": 13890 }, { "epoch": 25.629732225300092, "grad_norm": 0.19519758224487305, "learning_rate": 2.8704333333333332e-05, "loss": 0.005, "step": 13891 }, { "epoch": 25.63157894736842, "grad_norm": 0.21675550937652588, "learning_rate": 2.8704e-05, "loss": 0.0061, "step": 13892 }, { "epoch": 25.63342566943675, "grad_norm": 0.15496018528938293, "learning_rate": 2.8703666666666668e-05, "loss": 0.0043, "step": 13893 }, { "epoch": 25.63527239150508, "grad_norm": 0.5796242356300354, "learning_rate": 2.8703333333333334e-05, "loss": 0.0093, "step": 13894 }, { "epoch": 25.63711911357341, "grad_norm": 0.26933422684669495, "learning_rate": 2.8703e-05, "loss": 0.004, "step": 13895 }, { "epoch": 25.638965835641734, "grad_norm": 1.446235179901123, "learning_rate": 2.870266666666667e-05, "loss": 0.0082, "step": 13896 }, { "epoch": 25.640812557710063, "grad_norm": 1.0523089170455933, "learning_rate": 2.870233333333333e-05, "loss": 0.015, "step": 13897 }, { "epoch": 25.642659279778393, "grad_norm": 
0.45611003041267395, "learning_rate": 2.8702e-05, "loss": 0.0084, "step": 13898 }, { "epoch": 25.64450600184672, "grad_norm": 0.15992005169391632, "learning_rate": 2.8701666666666667e-05, "loss": 0.0025, "step": 13899 }, { "epoch": 25.64635272391505, "grad_norm": 0.08912533521652222, "learning_rate": 2.8701333333333333e-05, "loss": 0.0017, "step": 13900 }, { "epoch": 25.64819944598338, "grad_norm": 1.0898728370666504, "learning_rate": 2.8701000000000002e-05, "loss": 0.1265, "step": 13901 }, { "epoch": 25.65004616805171, "grad_norm": 0.5395174622535706, "learning_rate": 2.8700666666666668e-05, "loss": 0.1012, "step": 13902 }, { "epoch": 25.65189289012004, "grad_norm": 0.4160155951976776, "learning_rate": 2.8700333333333334e-05, "loss": 0.0628, "step": 13903 }, { "epoch": 25.653739612188367, "grad_norm": 0.5603948831558228, "learning_rate": 2.87e-05, "loss": 0.0745, "step": 13904 }, { "epoch": 25.655586334256693, "grad_norm": 0.4057348370552063, "learning_rate": 2.869966666666667e-05, "loss": 0.0692, "step": 13905 }, { "epoch": 25.657433056325022, "grad_norm": 0.379995733499527, "learning_rate": 2.869933333333333e-05, "loss": 0.0394, "step": 13906 }, { "epoch": 25.65927977839335, "grad_norm": 0.5244229435920715, "learning_rate": 2.8699e-05, "loss": 0.0504, "step": 13907 }, { "epoch": 25.66112650046168, "grad_norm": 0.34626778960227966, "learning_rate": 2.869866666666667e-05, "loss": 0.0344, "step": 13908 }, { "epoch": 25.66297322253001, "grad_norm": 0.5717699527740479, "learning_rate": 2.8698333333333333e-05, "loss": 0.0491, "step": 13909 }, { "epoch": 25.66481994459834, "grad_norm": 0.33867087960243225, "learning_rate": 2.8698000000000002e-05, "loss": 0.0252, "step": 13910 }, { "epoch": 25.666666666666668, "grad_norm": 0.7220029830932617, "learning_rate": 2.8697666666666668e-05, "loss": 0.0261, "step": 13911 }, { "epoch": 25.668513388734997, "grad_norm": 0.48679837584495544, "learning_rate": 2.8697333333333334e-05, "loss": 0.0953, "step": 13912 }, { "epoch": 
25.670360110803323, "grad_norm": 0.45722731947898865, "learning_rate": 2.8697e-05, "loss": 0.0296, "step": 13913 }, { "epoch": 25.67220683287165, "grad_norm": 0.27095258235931396, "learning_rate": 2.869666666666667e-05, "loss": 0.033, "step": 13914 }, { "epoch": 25.67405355493998, "grad_norm": 0.12416177988052368, "learning_rate": 2.8696333333333332e-05, "loss": 0.004, "step": 13915 }, { "epoch": 25.67590027700831, "grad_norm": 0.27076831459999084, "learning_rate": 2.8696e-05, "loss": 0.0085, "step": 13916 }, { "epoch": 25.67774699907664, "grad_norm": 0.4575108289718628, "learning_rate": 2.869566666666667e-05, "loss": 0.0153, "step": 13917 }, { "epoch": 25.67959372114497, "grad_norm": 0.33490633964538574, "learning_rate": 2.8695333333333333e-05, "loss": 0.0392, "step": 13918 }, { "epoch": 25.681440443213297, "grad_norm": 0.4233061969280243, "learning_rate": 2.8695000000000002e-05, "loss": 0.0212, "step": 13919 }, { "epoch": 25.683287165281627, "grad_norm": 0.37904930114746094, "learning_rate": 2.8694666666666668e-05, "loss": 0.0341, "step": 13920 }, { "epoch": 25.685133887349952, "grad_norm": 0.3937544524669647, "learning_rate": 2.8694333333333334e-05, "loss": 0.0148, "step": 13921 }, { "epoch": 25.68698060941828, "grad_norm": 0.21121932566165924, "learning_rate": 2.8694e-05, "loss": 0.0095, "step": 13922 }, { "epoch": 25.68882733148661, "grad_norm": 0.17242980003356934, "learning_rate": 2.8693666666666666e-05, "loss": 0.0067, "step": 13923 }, { "epoch": 25.69067405355494, "grad_norm": 0.6399073600769043, "learning_rate": 2.8693333333333335e-05, "loss": 0.006, "step": 13924 }, { "epoch": 25.69252077562327, "grad_norm": 0.4123702943325043, "learning_rate": 2.8693e-05, "loss": 0.0123, "step": 13925 }, { "epoch": 25.694367497691598, "grad_norm": 0.2438940405845642, "learning_rate": 2.8692666666666667e-05, "loss": 0.0168, "step": 13926 }, { "epoch": 25.696214219759927, "grad_norm": 0.18913382291793823, "learning_rate": 2.8692333333333333e-05, "loss": 0.0063, "step": 
13927 }, { "epoch": 25.698060941828256, "grad_norm": 0.2355855107307434, "learning_rate": 2.8692000000000002e-05, "loss": 0.0047, "step": 13928 }, { "epoch": 25.69990766389658, "grad_norm": 0.16520513594150543, "learning_rate": 2.8691666666666665e-05, "loss": 0.0062, "step": 13929 }, { "epoch": 25.70175438596491, "grad_norm": 0.50859135389328, "learning_rate": 2.8691333333333334e-05, "loss": 0.0138, "step": 13930 }, { "epoch": 25.70360110803324, "grad_norm": 0.3660358488559723, "learning_rate": 2.8691e-05, "loss": 0.0085, "step": 13931 }, { "epoch": 25.70544783010157, "grad_norm": 0.2390100210905075, "learning_rate": 2.8690666666666666e-05, "loss": 0.0045, "step": 13932 }, { "epoch": 25.7072945521699, "grad_norm": 0.2729160785675049, "learning_rate": 2.8690333333333336e-05, "loss": 0.011, "step": 13933 }, { "epoch": 25.709141274238227, "grad_norm": 0.2517271041870117, "learning_rate": 2.869e-05, "loss": 0.0084, "step": 13934 }, { "epoch": 25.710987996306557, "grad_norm": 0.291607141494751, "learning_rate": 2.8689666666666667e-05, "loss": 0.0109, "step": 13935 }, { "epoch": 25.712834718374886, "grad_norm": 0.43152952194213867, "learning_rate": 2.8689333333333333e-05, "loss": 0.0105, "step": 13936 }, { "epoch": 25.714681440443215, "grad_norm": 0.21754711866378784, "learning_rate": 2.8689000000000003e-05, "loss": 0.0107, "step": 13937 }, { "epoch": 25.71652816251154, "grad_norm": 0.3606095016002655, "learning_rate": 2.8688666666666665e-05, "loss": 0.0068, "step": 13938 }, { "epoch": 25.71837488457987, "grad_norm": 0.3872680962085724, "learning_rate": 2.8688333333333335e-05, "loss": 0.0066, "step": 13939 }, { "epoch": 25.7202216066482, "grad_norm": 0.24474932253360748, "learning_rate": 2.8688e-05, "loss": 0.0074, "step": 13940 }, { "epoch": 25.722068328716528, "grad_norm": 0.0998922809958458, "learning_rate": 2.8687666666666666e-05, "loss": 0.003, "step": 13941 }, { "epoch": 25.723915050784857, "grad_norm": 0.36174479126930237, "learning_rate": 2.8687333333333336e-05, 
"loss": 0.0061, "step": 13942 }, { "epoch": 25.725761772853186, "grad_norm": 0.3098700940608978, "learning_rate": 2.8687e-05, "loss": 0.0055, "step": 13943 }, { "epoch": 25.727608494921515, "grad_norm": 0.638459324836731, "learning_rate": 2.8686666666666668e-05, "loss": 0.0109, "step": 13944 }, { "epoch": 25.729455216989845, "grad_norm": 0.35461121797561646, "learning_rate": 2.8686333333333334e-05, "loss": 0.0093, "step": 13945 }, { "epoch": 25.73130193905817, "grad_norm": 0.14713731408119202, "learning_rate": 2.8686000000000003e-05, "loss": 0.0042, "step": 13946 }, { "epoch": 25.7331486611265, "grad_norm": 0.21553030610084534, "learning_rate": 2.8685666666666665e-05, "loss": 0.0061, "step": 13947 }, { "epoch": 25.73499538319483, "grad_norm": 0.24078437685966492, "learning_rate": 2.8685333333333335e-05, "loss": 0.0054, "step": 13948 }, { "epoch": 25.736842105263158, "grad_norm": 0.4704957604408264, "learning_rate": 2.8685e-05, "loss": 0.0136, "step": 13949 }, { "epoch": 25.738688827331487, "grad_norm": 1.031306505203247, "learning_rate": 2.8684666666666667e-05, "loss": 0.0218, "step": 13950 }, { "epoch": 25.740535549399816, "grad_norm": 0.7354793548583984, "learning_rate": 2.8684333333333336e-05, "loss": 0.1239, "step": 13951 }, { "epoch": 25.742382271468145, "grad_norm": 0.43336817622184753, "learning_rate": 2.8684e-05, "loss": 0.0813, "step": 13952 }, { "epoch": 25.744228993536474, "grad_norm": 0.48846545815467834, "learning_rate": 2.8683666666666668e-05, "loss": 0.111, "step": 13953 }, { "epoch": 25.746075715604803, "grad_norm": 0.48932769894599915, "learning_rate": 2.8683333333333334e-05, "loss": 0.0533, "step": 13954 }, { "epoch": 25.74792243767313, "grad_norm": 0.5916051864624023, "learning_rate": 2.8683e-05, "loss": 0.0647, "step": 13955 }, { "epoch": 25.749769159741458, "grad_norm": 0.5129830837249756, "learning_rate": 2.8682666666666666e-05, "loss": 0.0752, "step": 13956 }, { "epoch": 25.751615881809787, "grad_norm": 0.4459969699382782, "learning_rate": 
2.8682333333333335e-05, "loss": 0.0473, "step": 13957 }, { "epoch": 25.753462603878116, "grad_norm": 0.8263006210327148, "learning_rate": 2.8682e-05, "loss": 0.0351, "step": 13958 }, { "epoch": 25.755309325946445, "grad_norm": 0.6295078992843628, "learning_rate": 2.8681666666666667e-05, "loss": 0.042, "step": 13959 }, { "epoch": 25.757156048014775, "grad_norm": 0.3544279932975769, "learning_rate": 2.8681333333333336e-05, "loss": 0.0261, "step": 13960 }, { "epoch": 25.759002770083104, "grad_norm": 0.2778114676475525, "learning_rate": 2.8681e-05, "loss": 0.0233, "step": 13961 }, { "epoch": 25.760849492151433, "grad_norm": 0.26822900772094727, "learning_rate": 2.8680666666666668e-05, "loss": 0.0319, "step": 13962 }, { "epoch": 25.76269621421976, "grad_norm": 0.7484460473060608, "learning_rate": 2.8680333333333334e-05, "loss": 0.0482, "step": 13963 }, { "epoch": 25.764542936288088, "grad_norm": 0.1940113604068756, "learning_rate": 2.868e-05, "loss": 0.0103, "step": 13964 }, { "epoch": 25.766389658356417, "grad_norm": 0.3328947126865387, "learning_rate": 2.8679666666666666e-05, "loss": 0.0206, "step": 13965 }, { "epoch": 25.768236380424746, "grad_norm": 0.3248881697654724, "learning_rate": 2.8679333333333335e-05, "loss": 0.0189, "step": 13966 }, { "epoch": 25.770083102493075, "grad_norm": 0.17101606726646423, "learning_rate": 2.8679e-05, "loss": 0.0071, "step": 13967 }, { "epoch": 25.771929824561404, "grad_norm": 0.18649211525917053, "learning_rate": 2.8678666666666667e-05, "loss": 0.0083, "step": 13968 }, { "epoch": 25.773776546629733, "grad_norm": 0.2769390940666199, "learning_rate": 2.8678333333333336e-05, "loss": 0.0091, "step": 13969 }, { "epoch": 25.775623268698062, "grad_norm": 0.2639617323875427, "learning_rate": 2.8678e-05, "loss": 0.0082, "step": 13970 }, { "epoch": 25.777469990766388, "grad_norm": 0.27369749546051025, "learning_rate": 2.8677666666666668e-05, "loss": 0.0112, "step": 13971 }, { "epoch": 25.779316712834717, "grad_norm": 0.2493075728416443, 
"learning_rate": 2.8677333333333334e-05, "loss": 0.0053, "step": 13972 }, { "epoch": 25.781163434903046, "grad_norm": 0.35896432399749756, "learning_rate": 2.8677e-05, "loss": 0.0094, "step": 13973 }, { "epoch": 25.783010156971375, "grad_norm": 0.6993874311447144, "learning_rate": 2.867666666666667e-05, "loss": 0.0189, "step": 13974 }, { "epoch": 25.784856879039705, "grad_norm": 0.27760475873947144, "learning_rate": 2.8676333333333335e-05, "loss": 0.0064, "step": 13975 }, { "epoch": 25.786703601108034, "grad_norm": 0.32611775398254395, "learning_rate": 2.8676e-05, "loss": 0.0074, "step": 13976 }, { "epoch": 25.788550323176363, "grad_norm": 0.17413999140262604, "learning_rate": 2.8675666666666667e-05, "loss": 0.0061, "step": 13977 }, { "epoch": 25.790397045244692, "grad_norm": 0.1638621687889099, "learning_rate": 2.8675333333333336e-05, "loss": 0.008, "step": 13978 }, { "epoch": 25.792243767313018, "grad_norm": 0.4006885588169098, "learning_rate": 2.8675e-05, "loss": 0.0101, "step": 13979 }, { "epoch": 25.794090489381347, "grad_norm": 0.29481589794158936, "learning_rate": 2.867466666666667e-05, "loss": 0.009, "step": 13980 }, { "epoch": 25.795937211449676, "grad_norm": 0.10241249948740005, "learning_rate": 2.867433333333333e-05, "loss": 0.0025, "step": 13981 }, { "epoch": 25.797783933518005, "grad_norm": 0.198036789894104, "learning_rate": 2.8674e-05, "loss": 0.0068, "step": 13982 }, { "epoch": 25.799630655586334, "grad_norm": 0.14961473643779755, "learning_rate": 2.867366666666667e-05, "loss": 0.0051, "step": 13983 }, { "epoch": 25.801477377654663, "grad_norm": 0.11135370284318924, "learning_rate": 2.8673333333333332e-05, "loss": 0.0036, "step": 13984 }, { "epoch": 25.803324099722992, "grad_norm": 0.38078421354293823, "learning_rate": 2.8673e-05, "loss": 0.0071, "step": 13985 }, { "epoch": 25.80517082179132, "grad_norm": 0.15546701848506927, "learning_rate": 2.8672666666666667e-05, "loss": 0.0037, "step": 13986 }, { "epoch": 25.80701754385965, "grad_norm": 
0.3690662980079651, "learning_rate": 2.8672333333333333e-05, "loss": 0.0069, "step": 13987 }, { "epoch": 25.808864265927976, "grad_norm": 0.21701663732528687, "learning_rate": 2.8672e-05, "loss": 0.0119, "step": 13988 }, { "epoch": 25.810710987996305, "grad_norm": 0.5214126706123352, "learning_rate": 2.867166666666667e-05, "loss": 0.0079, "step": 13989 }, { "epoch": 25.812557710064635, "grad_norm": 0.24381795525550842, "learning_rate": 2.867133333333333e-05, "loss": 0.0042, "step": 13990 }, { "epoch": 25.814404432132964, "grad_norm": 0.36028149724006653, "learning_rate": 2.8671e-05, "loss": 0.0048, "step": 13991 }, { "epoch": 25.816251154201293, "grad_norm": 0.2194744199514389, "learning_rate": 2.867066666666667e-05, "loss": 0.0033, "step": 13992 }, { "epoch": 25.818097876269622, "grad_norm": 0.3237725496292114, "learning_rate": 2.8670333333333332e-05, "loss": 0.0069, "step": 13993 }, { "epoch": 25.81994459833795, "grad_norm": 0.4830024540424347, "learning_rate": 2.867e-05, "loss": 0.0158, "step": 13994 }, { "epoch": 25.82179132040628, "grad_norm": 0.8800148367881775, "learning_rate": 2.8669666666666668e-05, "loss": 0.0311, "step": 13995 }, { "epoch": 25.823638042474606, "grad_norm": 0.18014296889305115, "learning_rate": 2.8669333333333334e-05, "loss": 0.0028, "step": 13996 }, { "epoch": 25.825484764542935, "grad_norm": 0.25388187170028687, "learning_rate": 2.8669e-05, "loss": 0.0049, "step": 13997 }, { "epoch": 25.827331486611264, "grad_norm": 0.5490654706954956, "learning_rate": 2.866866666666667e-05, "loss": 0.0101, "step": 13998 }, { "epoch": 25.829178208679593, "grad_norm": 0.4083966910839081, "learning_rate": 2.8668333333333335e-05, "loss": 0.008, "step": 13999 }, { "epoch": 25.831024930747922, "grad_norm": 0.3681202232837677, "learning_rate": 2.8668e-05, "loss": 0.0142, "step": 14000 }, { "epoch": 25.831024930747922, "eval_cer": 0.11683127251989528, "eval_loss": 0.417955607175827, "eval_runtime": 16.0102, "eval_samples_per_second": 18.988, 
"eval_steps_per_second": 0.625, "eval_wer": 0.4121258633921719, "step": 14000 }, { "epoch": 25.831024930747922, "step": 14000, "total_flos": 1.8828835451512226e+20, "train_loss": 0.3484456565128265, "train_runtime": 27570.5866, "train_samples_per_second": 232.131, "train_steps_per_second": 3.627 } ], "logging_steps": 1.0, "max_steps": 100000, "num_input_tokens_seen": 0, "num_train_epochs": 185, "save_steps": 1000, "stateful_callbacks": { "EarlyStoppingCallback": { "args": { "early_stopping_patience": 5, "early_stopping_threshold": 0.0 }, "attributes": { "early_stopping_patience_counter": 5 } }, "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 1.8828835451512226e+20, "train_batch_size": 32, "trial_name": null, "trial_params": null }