| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 79.43793911007026, | |
| "eval_steps": 500, | |
| "global_step": 4240, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.1873536299765808, | |
| "grad_norm": 5.991908550262451, | |
| "learning_rate": 4.9999313759953484e-05, | |
| "loss": 6.4768, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.3747072599531616, | |
| "grad_norm": 8.760614395141602, | |
| "learning_rate": 4.999725507748798e-05, | |
| "loss": 5.0186, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.5620608899297423, | |
| "grad_norm": 4.322343826293945, | |
| "learning_rate": 4.999382406562349e-05, | |
| "loss": 3.1088, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.7494145199063232, | |
| "grad_norm": 1.4796526432037354, | |
| "learning_rate": 4.9989020912719864e-05, | |
| "loss": 2.2997, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.936768149882904, | |
| "grad_norm": 0.8381783962249756, | |
| "learning_rate": 4.998284588246634e-05, | |
| "loss": 1.9748, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 1.1241217798594847, | |
| "grad_norm": 0.6073408722877502, | |
| "learning_rate": 4.99752993138672e-05, | |
| "loss": 1.8716, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 1.3114754098360657, | |
| "grad_norm": 0.5777516961097717, | |
| "learning_rate": 4.996638162122302e-05, | |
| "loss": 1.7803, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 1.4988290398126463, | |
| "grad_norm": 0.5658383369445801, | |
| "learning_rate": 4.995609329410804e-05, | |
| "loss": 1.7585, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 1.6861826697892273, | |
| "grad_norm": 0.6109507083892822, | |
| "learning_rate": 4.994443489734322e-05, | |
| "loss": 1.6902, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 1.8735362997658078, | |
| "grad_norm": 0.6710304021835327, | |
| "learning_rate": 4.9931407070965254e-05, | |
| "loss": 1.675, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 2.060889929742389, | |
| "grad_norm": 0.6605099439620972, | |
| "learning_rate": 4.9917010530191455e-05, | |
| "loss": 1.5914, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 2.2482435597189694, | |
| "grad_norm": 0.8813541531562805, | |
| "learning_rate": 4.990124606538042e-05, | |
| "loss": 1.5681, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 2.4355971896955504, | |
| "grad_norm": 0.9561053514480591, | |
| "learning_rate": 4.9884114541988747e-05, | |
| "loss": 1.4891, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 2.6229508196721314, | |
| "grad_norm": 1.0302178859710693, | |
| "learning_rate": 4.9865616900523406e-05, | |
| "loss": 1.4524, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 2.810304449648712, | |
| "grad_norm": 1.114527940750122, | |
| "learning_rate": 4.984575415649019e-05, | |
| "loss": 1.3666, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 2.9976580796252925, | |
| "grad_norm": 1.1488112211227417, | |
| "learning_rate": 4.982452740033793e-05, | |
| "loss": 1.3581, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 3.1850117096018735, | |
| "grad_norm": 1.3187187910079956, | |
| "learning_rate": 4.9801937797398636e-05, | |
| "loss": 1.2926, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 3.3723653395784545, | |
| "grad_norm": 1.4319546222686768, | |
| "learning_rate": 4.9777986587823523e-05, | |
| "loss": 1.2463, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 3.559718969555035, | |
| "grad_norm": 1.4622282981872559, | |
| "learning_rate": 4.975267508651492e-05, | |
| "loss": 1.2304, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 3.747072599531616, | |
| "grad_norm": 2.000797748565674, | |
| "learning_rate": 4.97260046830541e-05, | |
| "loss": 1.2252, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 3.9344262295081966, | |
| "grad_norm": 1.6277581453323364, | |
| "learning_rate": 4.969797684162498e-05, | |
| "loss": 1.1649, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 4.121779859484778, | |
| "grad_norm": 2.1771910190582275, | |
| "learning_rate": 4.966859310093372e-05, | |
| "loss": 1.1471, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 4.309133489461359, | |
| "grad_norm": 1.9214098453521729, | |
| "learning_rate": 4.9637855074124305e-05, | |
| "loss": 1.1064, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 4.496487119437939, | |
| "grad_norm": 2.057615041732788, | |
| "learning_rate": 4.960576444868992e-05, | |
| "loss": 1.1061, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 4.68384074941452, | |
| "grad_norm": 1.851131796836853, | |
| "learning_rate": 4.957232298638036e-05, | |
| "loss": 1.0318, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 4.871194379391101, | |
| "grad_norm": 2.1048285961151123, | |
| "learning_rate": 4.953753252310526e-05, | |
| "loss": 1.0444, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 5.058548009367682, | |
| "grad_norm": 2.0235438346862793, | |
| "learning_rate": 4.950139496883335e-05, | |
| "loss": 1.0303, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 5.245901639344262, | |
| "grad_norm": 2.574484348297119, | |
| "learning_rate": 4.94639123074876e-05, | |
| "loss": 0.9954, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 5.433255269320843, | |
| "grad_norm": 2.1624159812927246, | |
| "learning_rate": 4.9425086596836264e-05, | |
| "loss": 0.9686, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 5.620608899297424, | |
| "grad_norm": 2.923964023590088, | |
| "learning_rate": 4.9384919968379945e-05, | |
| "loss": 0.961, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 5.807962529274005, | |
| "grad_norm": 3.605888843536377, | |
| "learning_rate": 4.934341462723455e-05, | |
| "loss": 0.9713, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 5.995316159250585, | |
| "grad_norm": 2.6934282779693604, | |
| "learning_rate": 4.930057285201027e-05, | |
| "loss": 0.9327, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 6.182669789227166, | |
| "grad_norm": 2.723066568374634, | |
| "learning_rate": 4.925639699468645e-05, | |
| "loss": 0.8974, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 6.370023419203747, | |
| "grad_norm": 2.7989768981933594, | |
| "learning_rate": 4.921088948048247e-05, | |
| "loss": 0.8912, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 6.557377049180328, | |
| "grad_norm": 2.685530424118042, | |
| "learning_rate": 4.916405280772462e-05, | |
| "loss": 0.8809, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 6.744730679156909, | |
| "grad_norm": 2.441941738128662, | |
| "learning_rate": 4.911588954770897e-05, | |
| "loss": 0.8984, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 6.932084309133489, | |
| "grad_norm": 2.7177064418792725, | |
| "learning_rate": 4.9066402344560115e-05, | |
| "loss": 0.855, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 7.11943793911007, | |
| "grad_norm": 3.2908644676208496, | |
| "learning_rate": 4.901559391508611e-05, | |
| "loss": 0.8785, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 7.306791569086651, | |
| "grad_norm": 3.195590019226074, | |
| "learning_rate": 4.896346704862927e-05, | |
| "loss": 0.8542, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 7.494145199063232, | |
| "grad_norm": 3.4960882663726807, | |
| "learning_rate": 4.891002460691306e-05, | |
| "loss": 0.7881, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 7.681498829039812, | |
| "grad_norm": 2.852773666381836, | |
| "learning_rate": 4.885526952388498e-05, | |
| "loss": 0.7957, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 7.868852459016393, | |
| "grad_norm": 2.8862316608428955, | |
| "learning_rate": 4.879920480555549e-05, | |
| "loss": 0.8022, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 8.056206088992974, | |
| "grad_norm": 2.706434488296509, | |
| "learning_rate": 4.874183352983297e-05, | |
| "loss": 0.7811, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 8.243559718969555, | |
| "grad_norm": 3.107057809829712, | |
| "learning_rate": 4.8683158846354786e-05, | |
| "loss": 0.7786, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 8.430913348946136, | |
| "grad_norm": 3.327939033508301, | |
| "learning_rate": 4.862318397631434e-05, | |
| "loss": 0.7431, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 8.618266978922717, | |
| "grad_norm": 3.249922513961792, | |
| "learning_rate": 4.8561912212284223e-05, | |
| "loss": 0.7477, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 8.805620608899297, | |
| "grad_norm": 3.709353446960449, | |
| "learning_rate": 4.84993469180355e-05, | |
| "loss": 0.7973, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 8.992974238875878, | |
| "grad_norm": 2.777991533279419, | |
| "learning_rate": 4.8435491528353026e-05, | |
| "loss": 0.7699, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 9.180327868852459, | |
| "grad_norm": 2.6447904109954834, | |
| "learning_rate": 4.8370349548846823e-05, | |
| "loss": 0.692, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 9.36768149882904, | |
| "grad_norm": 3.150733232498169, | |
| "learning_rate": 4.83039245557597e-05, | |
| "loss": 0.7435, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 9.55503512880562, | |
| "grad_norm": 3.316145420074463, | |
| "learning_rate": 4.823622019577089e-05, | |
| "loss": 0.7589, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 9.742388758782202, | |
| "grad_norm": 3.4923229217529297, | |
| "learning_rate": 4.8167240185795835e-05, | |
| "loss": 0.7262, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 9.929742388758783, | |
| "grad_norm": 3.1557626724243164, | |
| "learning_rate": 4.8096988312782174e-05, | |
| "loss": 0.6824, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 10.117096018735364, | |
| "grad_norm": 3.2009847164154053, | |
| "learning_rate": 4.8025468433501774e-05, | |
| "loss": 0.7238, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 10.304449648711945, | |
| "grad_norm": 3.149319887161255, | |
| "learning_rate": 4.795268447433907e-05, | |
| "loss": 0.7002, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 10.491803278688524, | |
| "grad_norm": 3.1471166610717773, | |
| "learning_rate": 4.787864043107546e-05, | |
| "loss": 0.6933, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 10.679156908665105, | |
| "grad_norm": 2.9310121536254883, | |
| "learning_rate": 4.780334036866997e-05, | |
| "loss": 0.7152, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 10.866510538641686, | |
| "grad_norm": 3.6723790168762207, | |
| "learning_rate": 4.772678842103605e-05, | |
| "loss": 0.679, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 11.053864168618267, | |
| "grad_norm": 3.633692502975464, | |
| "learning_rate": 4.764898879081468e-05, | |
| "loss": 0.6489, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 11.241217798594848, | |
| "grad_norm": 3.9104480743408203, | |
| "learning_rate": 4.756994574914359e-05, | |
| "loss": 0.6116, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 11.428571428571429, | |
| "grad_norm": 3.8203835487365723, | |
| "learning_rate": 4.748966363542285e-05, | |
| "loss": 0.6549, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 11.61592505854801, | |
| "grad_norm": 3.5407750606536865, | |
| "learning_rate": 4.7408146857076566e-05, | |
| "loss": 0.6532, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 11.80327868852459, | |
| "grad_norm": 4.155674457550049, | |
| "learning_rate": 4.732539988931097e-05, | |
| "loss": 0.6684, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 11.99063231850117, | |
| "grad_norm": 4.21943473815918, | |
| "learning_rate": 4.724142727486869e-05, | |
| "loss": 0.683, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 12.177985948477751, | |
| "grad_norm": 3.3667685985565186, | |
| "learning_rate": 4.715623362377939e-05, | |
| "loss": 0.6127, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 12.365339578454332, | |
| "grad_norm": 3.7362210750579834, | |
| "learning_rate": 4.706982361310669e-05, | |
| "loss": 0.606, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 12.552693208430913, | |
| "grad_norm": 3.549915313720703, | |
| "learning_rate": 4.698220198669136e-05, | |
| "loss": 0.6349, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 12.740046838407494, | |
| "grad_norm": 3.360044002532959, | |
| "learning_rate": 4.6893373554890925e-05, | |
| "loss": 0.6335, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 12.927400468384075, | |
| "grad_norm": 3.676515817642212, | |
| "learning_rate": 4.680334319431555e-05, | |
| "loss": 0.6283, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 13.114754098360656, | |
| "grad_norm": 4.325002193450928, | |
| "learning_rate": 4.6712115847560355e-05, | |
| "loss": 0.6248, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 13.302107728337237, | |
| "grad_norm": 4.889411449432373, | |
| "learning_rate": 4.6619696522934024e-05, | |
| "loss": 0.5959, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 13.489461358313818, | |
| "grad_norm": 4.090158939361572, | |
| "learning_rate": 4.652609029418389e-05, | |
| "loss": 0.6068, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 13.676814988290397, | |
| "grad_norm": 3.8901116847991943, | |
| "learning_rate": 4.643130230021737e-05, | |
| "loss": 0.6042, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 13.864168618266978, | |
| "grad_norm": 4.178182125091553, | |
| "learning_rate": 4.6335337744819873e-05, | |
| "loss": 0.6253, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 14.05152224824356, | |
| "grad_norm": 3.4572255611419678, | |
| "learning_rate": 4.6238201896369055e-05, | |
| "loss": 0.6136, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 14.23887587822014, | |
| "grad_norm": 4.382098197937012, | |
| "learning_rate": 4.613990008754565e-05, | |
| "loss": 0.5499, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 14.426229508196721, | |
| "grad_norm": 4.183515548706055, | |
| "learning_rate": 4.6040437715040706e-05, | |
| "loss": 0.5845, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 14.613583138173302, | |
| "grad_norm": 3.2976605892181396, | |
| "learning_rate": 4.593982023925926e-05, | |
| "loss": 0.6097, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 14.800936768149883, | |
| "grad_norm": 4.0663347244262695, | |
| "learning_rate": 4.58380531840206e-05, | |
| "loss": 0.6161, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 14.988290398126464, | |
| "grad_norm": 4.074985980987549, | |
| "learning_rate": 4.573514213625505e-05, | |
| "loss": 0.5597, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 15.175644028103045, | |
| "grad_norm": 4.086116790771484, | |
| "learning_rate": 4.563109274569717e-05, | |
| "loss": 0.5484, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 15.362997658079625, | |
| "grad_norm": 3.6355836391448975, | |
| "learning_rate": 4.552591072457565e-05, | |
| "loss": 0.5561, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 15.550351288056206, | |
| "grad_norm": 4.557271480560303, | |
| "learning_rate": 4.5419601847299706e-05, | |
| "loss": 0.5729, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 15.737704918032787, | |
| "grad_norm": 3.3405730724334717, | |
| "learning_rate": 4.5312171950142034e-05, | |
| "loss": 0.5654, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 15.925058548009368, | |
| "grad_norm": 5.102325439453125, | |
| "learning_rate": 4.5203626930918455e-05, | |
| "loss": 0.5766, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 16.11241217798595, | |
| "grad_norm": 4.394238471984863, | |
| "learning_rate": 4.509397274866409e-05, | |
| "loss": 0.5617, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 16.29976580796253, | |
| "grad_norm": 4.780090808868408, | |
| "learning_rate": 4.498321542330623e-05, | |
| "loss": 0.5363, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 16.48711943793911, | |
| "grad_norm": 3.9079573154449463, | |
| "learning_rate": 4.4871361035333836e-05, | |
| "loss": 0.5387, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 16.67447306791569, | |
| "grad_norm": 4.230076789855957, | |
| "learning_rate": 4.4758415725463746e-05, | |
| "loss": 0.5594, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 16.861826697892273, | |
| "grad_norm": 3.655414581298828, | |
| "learning_rate": 4.464438569430354e-05, | |
| "loss": 0.5556, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 17.049180327868854, | |
| "grad_norm": 3.818207025527954, | |
| "learning_rate": 4.452927720201112e-05, | |
| "loss": 0.5367, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 17.236533957845435, | |
| "grad_norm": 3.827942371368408, | |
| "learning_rate": 4.441309656795106e-05, | |
| "loss": 0.5108, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 17.423887587822016, | |
| "grad_norm": 3.933605194091797, | |
| "learning_rate": 4.4295850170347664e-05, | |
| "loss": 0.5258, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 17.611241217798593, | |
| "grad_norm": 4.577270030975342, | |
| "learning_rate": 4.417754444593478e-05, | |
| "loss": 0.5313, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 17.798594847775174, | |
| "grad_norm": 5.285182476043701, | |
| "learning_rate": 4.40581858896025e-05, | |
| "loss": 0.5421, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 17.985948477751755, | |
| "grad_norm": 4.50776481628418, | |
| "learning_rate": 4.3937781054040505e-05, | |
| "loss": 0.5428, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 18.173302107728336, | |
| "grad_norm": 3.9270503520965576, | |
| "learning_rate": 4.38163365493784e-05, | |
| "loss": 0.4744, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 18.360655737704917, | |
| "grad_norm": 4.346292018890381, | |
| "learning_rate": 4.3693859042822774e-05, | |
| "loss": 0.5177, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 18.548009367681498, | |
| "grad_norm": 3.9130234718322754, | |
| "learning_rate": 4.357035525829123e-05, | |
| "loss": 0.5387, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 18.73536299765808, | |
| "grad_norm": 3.995023250579834, | |
| "learning_rate": 4.344583197604318e-05, | |
| "loss": 0.5071, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 18.92271662763466, | |
| "grad_norm": 3.8441927433013916, | |
| "learning_rate": 4.3320296032307675e-05, | |
| "loss": 0.5138, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 19.11007025761124, | |
| "grad_norm": 5.217072010040283, | |
| "learning_rate": 4.319375431890806e-05, | |
| "loss": 0.4962, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 19.297423887587822, | |
| "grad_norm": 5.450128078460693, | |
| "learning_rate": 4.306621378288364e-05, | |
| "loss": 0.4835, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 19.484777517564403, | |
| "grad_norm": 3.889098644256592, | |
| "learning_rate": 4.293768142610828e-05, | |
| "loss": 0.499, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 19.672131147540984, | |
| "grad_norm": 5.706878662109375, | |
| "learning_rate": 4.2808164304906026e-05, | |
| "loss": 0.5214, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 19.859484777517565, | |
| "grad_norm": 4.337571620941162, | |
| "learning_rate": 4.267766952966369e-05, | |
| "loss": 0.4985, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 20.046838407494146, | |
| "grad_norm": 4.532181262969971, | |
| "learning_rate": 4.254620426444053e-05, | |
| "loss": 0.5146, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 20.234192037470727, | |
| "grad_norm": 5.095364570617676, | |
| "learning_rate": 4.241377572657493e-05, | |
| "loss": 0.4821, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 20.421545667447308, | |
| "grad_norm": 4.10978889465332, | |
| "learning_rate": 4.228039118628816e-05, | |
| "loss": 0.4685, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 20.60889929742389, | |
| "grad_norm": 3.92051100730896, | |
| "learning_rate": 4.214605796628527e-05, | |
| "loss": 0.4857, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 20.796252927400467, | |
| "grad_norm": 5.784697532653809, | |
| "learning_rate": 4.2010783441353064e-05, | |
| "loss": 0.478, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 20.983606557377048, | |
| "grad_norm": 4.860243320465088, | |
| "learning_rate": 4.187457503795527e-05, | |
| "loss": 0.509, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 21.17096018735363, | |
| "grad_norm": 4.482259750366211, | |
| "learning_rate": 4.173744023382474e-05, | |
| "loss": 0.4508, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 21.35831381733021, | |
| "grad_norm": 4.613847255706787, | |
| "learning_rate": 4.159938655755306e-05, | |
| "loss": 0.4748, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 21.54566744730679, | |
| "grad_norm": 5.015346050262451, | |
| "learning_rate": 4.14604215881771e-05, | |
| "loss": 0.4587, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 21.73302107728337, | |
| "grad_norm": 4.96614933013916, | |
| "learning_rate": 4.1320552954763044e-05, | |
| "loss": 0.4619, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 21.920374707259953, | |
| "grad_norm": 4.224035739898682, | |
| "learning_rate": 4.117978833598747e-05, | |
| "loss": 0.476, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 22.107728337236534, | |
| "grad_norm": 4.694319725036621, | |
| "learning_rate": 4.103813545971589e-05, | |
| "loss": 0.4964, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 22.295081967213115, | |
| "grad_norm": 4.884482383728027, | |
| "learning_rate": 4.089560210257838e-05, | |
| "loss": 0.4527, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 22.482435597189696, | |
| "grad_norm": 4.385354518890381, | |
| "learning_rate": 4.075219608954278e-05, | |
| "loss": 0.4593, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 22.669789227166277, | |
| "grad_norm": 4.597926139831543, | |
| "learning_rate": 4.0607925293485e-05, | |
| "loss": 0.4531, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 22.857142857142858, | |
| "grad_norm": 4.493633270263672, | |
| "learning_rate": 4.046279763475687e-05, | |
| "loss": 0.4357, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 23.04449648711944, | |
| "grad_norm": 4.796749114990234, | |
| "learning_rate": 4.031682108075129e-05, | |
| "loss": 0.4529, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 23.23185011709602, | |
| "grad_norm": 4.556844711303711, | |
| "learning_rate": 4.017000364546484e-05, | |
| "loss": 0.4613, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 23.4192037470726, | |
| "grad_norm": 5.220188140869141, | |
| "learning_rate": 4.00223533890578e-05, | |
| "loss": 0.433, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 23.60655737704918, | |
| "grad_norm": 5.892832279205322, | |
| "learning_rate": 3.987387841741169e-05, | |
| "loss": 0.4388, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 23.793911007025763, | |
| "grad_norm": 5.893739700317383, | |
| "learning_rate": 3.972458688168421e-05, | |
| "loss": 0.4542, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 23.98126463700234, | |
| "grad_norm": 4.80185079574585, | |
| "learning_rate": 3.95744869778618e-05, | |
| "loss": 0.4289, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 24.16861826697892, | |
| "grad_norm": 4.6050028800964355, | |
| "learning_rate": 3.942358694630967e-05, | |
| "loss": 0.438, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 24.355971896955502, | |
| "grad_norm": 4.912991046905518, | |
| "learning_rate": 3.927189507131938e-05, | |
| "loss": 0.4329, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 24.543325526932083, | |
| "grad_norm": 4.563324928283691, | |
| "learning_rate": 3.911941968065408e-05, | |
| "loss": 0.4508, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 24.730679156908664, | |
| "grad_norm": 5.688429355621338, | |
| "learning_rate": 3.896616914509131e-05, | |
| "loss": 0.4303, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 24.918032786885245, | |
| "grad_norm": 5.020318984985352, | |
| "learning_rate": 3.881215187796344e-05, | |
| "loss": 0.4255, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 25.105386416861826, | |
| "grad_norm": 6.469161510467529, | |
| "learning_rate": 3.8657376334695794e-05, | |
| "loss": 0.4206, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 25.292740046838407, | |
| "grad_norm": 4.912801742553711, | |
| "learning_rate": 3.8501851012342446e-05, | |
| "loss": 0.4545, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 25.480093676814988, | |
| "grad_norm": 4.866584777832031, | |
| "learning_rate": 3.8345584449119776e-05, | |
| "loss": 0.4167, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 25.66744730679157, | |
| "grad_norm": 4.946688175201416, | |
| "learning_rate": 3.818858522393764e-05, | |
| "loss": 0.423, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 25.85480093676815, | |
| "grad_norm": 5.518560886383057, | |
| "learning_rate": 3.80308619559285e-05, | |
| "loss": 0.4393, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 26.04215456674473, | |
| "grad_norm": 4.3973283767700195, | |
| "learning_rate": 3.787242330397418e-05, | |
| "loss": 0.4142, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 26.229508196721312, | |
| "grad_norm": 4.848239898681641, | |
| "learning_rate": 3.7713277966230514e-05, | |
| "loss": 0.4099, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 26.416861826697893, | |
| "grad_norm": 4.581872463226318, | |
| "learning_rate": 3.755343467964981e-05, | |
| "loss": 0.3986, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 26.604215456674474, | |
| "grad_norm": 4.8950419425964355, | |
| "learning_rate": 3.7392902219501234e-05, | |
| "loss": 0.4239, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 26.791569086651055, | |
| "grad_norm": 4.911006450653076, | |
| "learning_rate": 3.723168939888901e-05, | |
| "loss": 0.4013, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 26.978922716627636, | |
| "grad_norm": 5.166593074798584, | |
| "learning_rate": 3.706980506826863e-05, | |
| "loss": 0.4381, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 27.166276346604217, | |
| "grad_norm": 5.103715896606445, | |
| "learning_rate": 3.690725811496092e-05, | |
| "loss": 0.4018, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 27.353629976580795, | |
| "grad_norm": 5.886387825012207, | |
| "learning_rate": 3.67440574626642e-05, | |
| "loss": 0.3995, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 27.540983606557376, | |
| "grad_norm": 5.4808268547058105, | |
| "learning_rate": 3.6580212070964324e-05, | |
| "loss": 0.4004, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 27.728337236533957, | |
| "grad_norm": 4.735198020935059, | |
| "learning_rate": 3.6415730934842827e-05, | |
| "loss": 0.3999, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 27.915690866510538, | |
| "grad_norm": 5.017817497253418, | |
| "learning_rate": 3.625062308418312e-05, | |
| "loss": 0.4149, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 28.10304449648712, | |
| "grad_norm": 5.427640914916992, | |
| "learning_rate": 3.608489758327472e-05, | |
| "loss": 0.4194, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 28.2903981264637, | |
| "grad_norm": 5.762901306152344, | |
| "learning_rate": 3.5918563530315664e-05, | |
| "loss": 0.3852, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 28.47775175644028, | |
| "grad_norm": 4.809139728546143, | |
| "learning_rate": 3.575163005691302e-05, | |
| "loss": 0.3858, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 28.66510538641686, | |
| "grad_norm": 5.195406436920166, | |
| "learning_rate": 3.5584106327581536e-05, | |
| "loss": 0.3931, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 28.852459016393443, | |
| "grad_norm": 5.359652996063232, | |
| "learning_rate": 3.541600153924058e-05, | |
| "loss": 0.4196, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 29.039812646370024, | |
| "grad_norm": 4.440913200378418, | |
| "learning_rate": 3.524732492070915e-05, | |
| "loss": 0.3731, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 29.227166276346605, | |
| "grad_norm": 4.653567790985107, | |
| "learning_rate": 3.507808573219931e-05, | |
| "loss": 0.3628, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 29.414519906323186, | |
| "grad_norm": 6.330109119415283, | |
| "learning_rate": 3.4908293264807734e-05, | |
| "loss": 0.4074, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 29.601873536299767, | |
| "grad_norm": 4.850047588348389, | |
| "learning_rate": 3.473795684000569e-05, | |
| "loss": 0.3495, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 29.789227166276348, | |
| "grad_norm": 5.027477264404297, | |
| "learning_rate": 3.456708580912725e-05, | |
| "loss": 0.409, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 29.97658079625293, | |
| "grad_norm": 5.475801467895508, | |
| "learning_rate": 3.4395689552855955e-05, | |
| "loss": 0.3954, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 30.16393442622951, | |
| "grad_norm": 4.786725044250488, | |
| "learning_rate": 3.4223777480709805e-05, | |
| "loss": 0.3666, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 30.35128805620609, | |
| "grad_norm": 6.034923076629639, | |
| "learning_rate": 3.4051359030524654e-05, | |
| "loss": 0.3557, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 30.538641686182668, | |
| "grad_norm": 5.1971755027771, | |
| "learning_rate": 3.387844366793614e-05, | |
| "loss": 0.3563, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 30.72599531615925, | |
| "grad_norm": 5.740716457366943, | |
| "learning_rate": 3.3705040885859975e-05, | |
| "loss": 0.3874, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 30.91334894613583, | |
| "grad_norm": 5.543055057525635, | |
| "learning_rate": 3.3531160203970805e-05, | |
| "loss": 0.4104, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 31.10070257611241, | |
| "grad_norm": 4.964486598968506, | |
| "learning_rate": 3.335681116817963e-05, | |
| "loss": 0.3809, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 31.288056206088992, | |
| "grad_norm": 5.344038963317871, | |
| "learning_rate": 3.318200335010967e-05, | |
| "loss": 0.3536, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 31.475409836065573, | |
| "grad_norm": 5.5590901374816895, | |
| "learning_rate": 3.300674634657094e-05, | |
| "loss": 0.3613, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 31.662763466042154, | |
| "grad_norm": 5.377177715301514, | |
| "learning_rate": 3.28310497790334e-05, | |
| "loss": 0.3828, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 31.850117096018735, | |
| "grad_norm": 5.412164688110352, | |
| "learning_rate": 3.265492329309867e-05, | |
| "loss": 0.3481, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 32.03747072599531, | |
| "grad_norm": 4.8478593826293945, | |
| "learning_rate": 3.247837655797061e-05, | |
| "loss": 0.3609, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 32.2248243559719, | |
| "grad_norm": 4.969062328338623, | |
| "learning_rate": 3.2301419265924395e-05, | |
| "loss": 0.3319, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 32.412177985948475, | |
| "grad_norm": 5.705166816711426, | |
| "learning_rate": 3.212406113177445e-05, | |
| "loss": 0.3464, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 32.59953161592506, | |
| "grad_norm": 5.679583549499512, | |
| "learning_rate": 3.19463118923411e-05, | |
| "loss": 0.3518, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 32.78688524590164, | |
| "grad_norm": 5.57464075088501, | |
| "learning_rate": 3.1768181305916066e-05, | |
| "loss": 0.3644, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 32.97423887587822, | |
| "grad_norm": 5.604707717895508, | |
| "learning_rate": 3.158967915172669e-05, | |
| "loss": 0.3856, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 33.1615925058548, | |
| "grad_norm": 4.796475887298584, | |
| "learning_rate": 3.1410815229399116e-05, | |
| "loss": 0.349, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 33.34894613583138, | |
| "grad_norm": 5.308904647827148, | |
| "learning_rate": 3.123159935842024e-05, | |
| "loss": 0.3319, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 33.53629976580796, | |
| "grad_norm": 5.319141387939453, | |
| "learning_rate": 3.1052041377598674e-05, | |
| "loss": 0.3403, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 33.723653395784545, | |
| "grad_norm": 6.2813496589660645, | |
| "learning_rate": 3.0872151144524595e-05, | |
| "loss": 0.331, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 33.91100702576112, | |
| "grad_norm": 6.759060859680176, | |
| "learning_rate": 3.069193853502855e-05, | |
| "loss": 0.3683, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 34.09836065573771, | |
| "grad_norm": 5.812283515930176, | |
| "learning_rate": 3.0511413442639296e-05, | |
| "loss": 0.3389, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 34.285714285714285, | |
| "grad_norm": 5.7895026206970215, | |
| "learning_rate": 3.0330585778040677e-05, | |
| "loss": 0.3302, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 34.47306791569087, | |
| "grad_norm": 5.957363605499268, | |
| "learning_rate": 3.014946546852746e-05, | |
| "loss": 0.3334, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 34.66042154566745, | |
| "grad_norm": 5.722705364227295, | |
| "learning_rate": 2.996806245746044e-05, | |
| "loss": 0.3419, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 34.84777517564403, | |
| "grad_norm": 6.884836196899414, | |
| "learning_rate": 2.978638670372047e-05, | |
| "loss": 0.3375, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 35.03512880562061, | |
| "grad_norm": 4.799129962921143, | |
| "learning_rate": 2.960444818116176e-05, | |
| "loss": 0.3473, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 35.222482435597186, | |
| "grad_norm": 5.28724479675293, | |
| "learning_rate": 2.9422256878064325e-05, | |
| "loss": 0.2882, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 35.40983606557377, | |
| "grad_norm": 5.755520343780518, | |
| "learning_rate": 2.923982279658564e-05, | |
| "loss": 0.3336, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 35.59718969555035, | |
| "grad_norm": 5.549156188964844, | |
| "learning_rate": 2.9057155952211502e-05, | |
| "loss": 0.3487, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 35.78454332552693, | |
| "grad_norm": 5.643553256988525, | |
| "learning_rate": 2.8874266373206216e-05, | |
| "loss": 0.3483, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 35.97189695550351, | |
| "grad_norm": 5.7322540283203125, | |
| "learning_rate": 2.8691164100062034e-05, | |
| "loss": 0.3339, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 36.159250585480095, | |
| "grad_norm": 9.893467903137207, | |
| "learning_rate": 2.8507859184947957e-05, | |
| "loss": 0.3177, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 36.34660421545667, | |
| "grad_norm": 4.880067348480225, | |
| "learning_rate": 2.8324361691157858e-05, | |
| "loss": 0.3103, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 36.53395784543326, | |
| "grad_norm": 5.725404262542725, | |
| "learning_rate": 2.8140681692558035e-05, | |
| "loss": 0.3145, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 36.721311475409834, | |
| "grad_norm": 5.862415313720703, | |
| "learning_rate": 2.7956829273034148e-05, | |
| "loss": 0.3246, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 36.90866510538642, | |
| "grad_norm": 6.573834419250488, | |
| "learning_rate": 2.7772814525937635e-05, | |
| "loss": 0.3479, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 37.096018735362996, | |
| "grad_norm": 6.182538032531738, | |
| "learning_rate": 2.7588647553531576e-05, | |
| "loss": 0.324, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 37.28337236533958, | |
| "grad_norm": 5.400746822357178, | |
| "learning_rate": 2.740433846643612e-05, | |
| "loss": 0.3064, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 37.47072599531616, | |
| "grad_norm": 5.840080738067627, | |
| "learning_rate": 2.7219897383073373e-05, | |
| "loss": 0.3211, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 37.65807962529274, | |
| "grad_norm": 5.707023620605469, | |
| "learning_rate": 2.7035334429111958e-05, | |
| "loss": 0.3285, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 37.84543325526932, | |
| "grad_norm": 5.929535865783691, | |
| "learning_rate": 2.6850659736911073e-05, | |
| "loss": 0.3126, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 38.032786885245905, | |
| "grad_norm": 4.90598726272583, | |
| "learning_rate": 2.6665883444964278e-05, | |
| "loss": 0.3101, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 38.22014051522248, | |
| "grad_norm": 6.5826616287231445, | |
| "learning_rate": 2.648101569734286e-05, | |
| "loss": 0.3216, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 38.40749414519907, | |
| "grad_norm": 5.600116729736328, | |
| "learning_rate": 2.629606664313896e-05, | |
| "loss": 0.2956, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 38.594847775175644, | |
| "grad_norm": 5.065032482147217, | |
| "learning_rate": 2.6111046435908383e-05, | |
| "loss": 0.2983, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 38.78220140515222, | |
| "grad_norm": 5.835258483886719, | |
| "learning_rate": 2.5925965233113174e-05, | |
| "loss": 0.3178, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 38.969555035128806, | |
| "grad_norm": 6.896608352661133, | |
| "learning_rate": 2.5740833195563996e-05, | |
| "loss": 0.309, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 39.156908665105384, | |
| "grad_norm": 5.803635120391846, | |
| "learning_rate": 2.5555660486862294e-05, | |
| "loss": 0.3285, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 39.34426229508197, | |
| "grad_norm": 6.226255416870117, | |
| "learning_rate": 2.537045727284232e-05, | |
| "loss": 0.2969, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 39.531615925058546, | |
| "grad_norm": 5.5187201499938965, | |
| "learning_rate": 2.5185233721013057e-05, | |
| "loss": 0.2787, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 39.71896955503513, | |
| "grad_norm": 5.887444496154785, | |
| "learning_rate": 2.5e-05, | |
| "loss": 0.2992, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 39.90632318501171, | |
| "grad_norm": 5.903340816497803, | |
| "learning_rate": 2.481476627898695e-05, | |
| "loss": 0.3051, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 40.09367681498829, | |
| "grad_norm": 5.660261631011963, | |
| "learning_rate": 2.4629542727157684e-05, | |
| "loss": 0.281, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 40.28103044496487, | |
| "grad_norm": 4.755375862121582, | |
| "learning_rate": 2.444433951313772e-05, | |
| "loss": 0.2615, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 40.468384074941454, | |
| "grad_norm": 5.870564937591553, | |
| "learning_rate": 2.4259166804436006e-05, | |
| "loss": 0.3151, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 40.65573770491803, | |
| "grad_norm": 6.845561504364014, | |
| "learning_rate": 2.407403476688683e-05, | |
| "loss": 0.3095, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 40.843091334894616, | |
| "grad_norm": 4.970867156982422, | |
| "learning_rate": 2.388895356409162e-05, | |
| "loss": 0.2828, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 41.030444964871194, | |
| "grad_norm": 5.148143768310547, | |
| "learning_rate": 2.3703933356861048e-05, | |
| "loss": 0.3038, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 41.21779859484778, | |
| "grad_norm": 5.9692583084106445, | |
| "learning_rate": 2.3518984302657146e-05, | |
| "loss": 0.2893, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 41.405152224824356, | |
| "grad_norm": 6.505251884460449, | |
| "learning_rate": 2.3334116555035724e-05, | |
| "loss": 0.288, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 41.59250585480093, | |
| "grad_norm": 5.992464542388916, | |
| "learning_rate": 2.314934026308893e-05, | |
| "loss": 0.2905, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 41.77985948477752, | |
| "grad_norm": 5.301249980926514, | |
| "learning_rate": 2.296466557088805e-05, | |
| "loss": 0.2704, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 41.967213114754095, | |
| "grad_norm": 5.807764053344727, | |
| "learning_rate": 2.2780102616926633e-05, | |
| "loss": 0.2902, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 42.15456674473068, | |
| "grad_norm": 5.2341742515563965, | |
| "learning_rate": 2.259566153356389e-05, | |
| "loss": 0.2753, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 42.34192037470726, | |
| "grad_norm": 5.364047527313232, | |
| "learning_rate": 2.2411352446468426e-05, | |
| "loss": 0.2797, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 42.52927400468384, | |
| "grad_norm": 5.803712368011475, | |
| "learning_rate": 2.2227185474062377e-05, | |
| "loss": 0.2825, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 42.71662763466042, | |
| "grad_norm": 5.672751426696777, | |
| "learning_rate": 2.2043170726965858e-05, | |
| "loss": 0.2881, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 42.903981264637004, | |
| "grad_norm": 5.412528991699219, | |
| "learning_rate": 2.1859318307441967e-05, | |
| "loss": 0.2791, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 43.09133489461358, | |
| "grad_norm": 5.854480743408203, | |
| "learning_rate": 2.1675638308842145e-05, | |
| "loss": 0.2962, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 43.278688524590166, | |
| "grad_norm": 5.328369617462158, | |
| "learning_rate": 2.1492140815052053e-05, | |
| "loss": 0.2699, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 43.46604215456674, | |
| "grad_norm": 4.856033802032471, | |
| "learning_rate": 2.1308835899937972e-05, | |
| "loss": 0.2734, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 43.65339578454333, | |
| "grad_norm": 6.26331090927124, | |
| "learning_rate": 2.1125733626793793e-05, | |
| "loss": 0.275, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 43.840749414519905, | |
| "grad_norm": 6.019165992736816, | |
| "learning_rate": 2.09428440477885e-05, | |
| "loss": 0.3627, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 44.02810304449649, | |
| "grad_norm": 5.1247711181640625, | |
| "learning_rate": 2.0760177203414368e-05, | |
| "loss": 0.2627, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 44.21545667447307, | |
| "grad_norm": 5.77355432510376, | |
| "learning_rate": 2.0577743121935684e-05, | |
| "loss": 0.2523, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 44.40281030444965, | |
| "grad_norm": 6.598211765289307, | |
| "learning_rate": 2.0395551818838247e-05, | |
| "loss": 0.2882, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 44.59016393442623, | |
| "grad_norm": 6.168797969818115, | |
| "learning_rate": 2.0213613296279533e-05, | |
| "loss": 0.2739, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 44.777517564402814, | |
| "grad_norm": 5.739145278930664, | |
| "learning_rate": 2.003193754253957e-05, | |
| "loss": 0.2569, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 44.96487119437939, | |
| "grad_norm": 5.858418941497803, | |
| "learning_rate": 1.9850534531472546e-05, | |
| "loss": 0.2575, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 45.15222482435597, | |
| "grad_norm": 5.349724769592285, | |
| "learning_rate": 1.9669414221959332e-05, | |
| "loss": 0.2596, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 45.33957845433255, | |
| "grad_norm": 6.348044395446777, | |
| "learning_rate": 1.9488586557360703e-05, | |
| "loss": 0.2643, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 45.52693208430913, | |
| "grad_norm": 5.21057653427124, | |
| "learning_rate": 1.930806146497146e-05, | |
| "loss": 0.2649, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 45.714285714285715, | |
| "grad_norm": 6.046148300170898, | |
| "learning_rate": 1.912784885547541e-05, | |
| "loss": 0.2688, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 45.90163934426229, | |
| "grad_norm": 5.811086177825928, | |
| "learning_rate": 1.8947958622401328e-05, | |
| "loss": 0.2547, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 46.08899297423888, | |
| "grad_norm": 6.68256950378418, | |
| "learning_rate": 1.876840064157976e-05, | |
| "loss": 0.2611, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 46.276346604215455, | |
| "grad_norm": 6.05044412612915, | |
| "learning_rate": 1.8589184770600893e-05, | |
| "loss": 0.2308, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 46.46370023419204, | |
| "grad_norm": 5.7417073249816895, | |
| "learning_rate": 1.8410320848273315e-05, | |
| "loss": 0.2572, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 46.65105386416862, | |
| "grad_norm": 5.557978630065918, | |
| "learning_rate": 1.8231818694083937e-05, | |
| "loss": 0.2689, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 46.8384074941452, | |
| "grad_norm": 5.32368278503418, | |
| "learning_rate": 1.8053688107658908e-05, | |
| "loss": 0.2518, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 47.02576112412178, | |
| "grad_norm": 5.275130748748779, | |
| "learning_rate": 1.7875938868225563e-05, | |
| "loss": 0.2519, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 47.21311475409836, | |
| "grad_norm": 5.496984004974365, | |
| "learning_rate": 1.769858073407561e-05, | |
| "loss": 0.2425, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 47.40046838407494, | |
| "grad_norm": 5.19797420501709, | |
| "learning_rate": 1.752162344202939e-05, | |
| "loss": 0.2665, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 47.587822014051525, | |
| "grad_norm": 5.920257091522217, | |
| "learning_rate": 1.734507670690133e-05, | |
| "loss": 0.2543, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 47.7751756440281, | |
| "grad_norm": 5.570456027984619, | |
| "learning_rate": 1.7168950220966614e-05, | |
| "loss": 0.2468, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 47.96252927400468, | |
| "grad_norm": 6.8460211753845215, | |
| "learning_rate": 1.6993253653429063e-05, | |
| "loss": 0.2541, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 48.149882903981265, | |
| "grad_norm": 6.266761779785156, | |
| "learning_rate": 1.681799664989033e-05, | |
| "loss": 0.2497, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 48.33723653395784, | |
| "grad_norm": 5.843891143798828, | |
| "learning_rate": 1.6643188831820375e-05, | |
| "loss": 0.2472, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 48.52459016393443, | |
| "grad_norm": 5.972542762756348, | |
| "learning_rate": 1.64688397960292e-05, | |
| "loss": 0.2501, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 48.711943793911004, | |
| "grad_norm": 5.9259724617004395, | |
| "learning_rate": 1.6294959114140034e-05, | |
| "loss": 0.2345, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 48.89929742388759, | |
| "grad_norm": 6.9172492027282715, | |
| "learning_rate": 1.6121556332063862e-05, | |
| "loss": 0.2492, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 49.086651053864166, | |
| "grad_norm": 5.66576623916626, | |
| "learning_rate": 1.5948640969475346e-05, | |
| "loss": 0.2464, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 49.27400468384075, | |
| "grad_norm": 5.968489646911621, | |
| "learning_rate": 1.5776222519290208e-05, | |
| "loss": 0.2453, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 49.46135831381733, | |
| "grad_norm": 5.3877739906311035, | |
| "learning_rate": 1.560431044714405e-05, | |
| "loss": 0.2331, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 49.64871194379391, | |
| "grad_norm": 6.003279685974121, | |
| "learning_rate": 1.5432914190872757e-05, | |
| "loss": 0.2451, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 49.83606557377049, | |
| "grad_norm": 5.7422709465026855, | |
| "learning_rate": 1.5262043159994317e-05, | |
| "loss": 0.2421, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 50.023419203747075, | |
| "grad_norm": 5.42122745513916, | |
| "learning_rate": 1.5091706735192268e-05, | |
| "loss": 0.2502, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 50.21077283372365, | |
| "grad_norm": 6.197154998779297, | |
| "learning_rate": 1.49219142678007e-05, | |
| "loss": 0.2319, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 50.39812646370024, | |
| "grad_norm": 6.868821620941162, | |
| "learning_rate": 1.4752675079290851e-05, | |
| "loss": 0.2446, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 50.585480093676814, | |
| "grad_norm": 5.866207599639893, | |
| "learning_rate": 1.4583998460759424e-05, | |
| "loss": 0.236, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 50.7728337236534, | |
| "grad_norm": 5.33125638961792, | |
| "learning_rate": 1.4415893672418462e-05, | |
| "loss": 0.2392, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 50.960187353629976, | |
| "grad_norm": 5.089175701141357, | |
| "learning_rate": 1.4248369943086998e-05, | |
| "loss": 0.2336, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 51.14754098360656, | |
| "grad_norm": 6.228028774261475, | |
| "learning_rate": 1.4081436469684339e-05, | |
| "loss": 0.2348, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 51.33489461358314, | |
| "grad_norm": 5.978231906890869, | |
| "learning_rate": 1.3915102416725287e-05, | |
| "loss": 0.2337, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 51.522248243559716, | |
| "grad_norm": 5.551085472106934, | |
| "learning_rate": 1.3749376915816886e-05, | |
| "loss": 0.2258, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 51.7096018735363, | |
| "grad_norm": 6.166746616363525, | |
| "learning_rate": 1.3584269065157174e-05, | |
| "loss": 0.2436, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 51.89695550351288, | |
| "grad_norm": 6.301324844360352, | |
| "learning_rate": 1.3419787929035682e-05, | |
| "loss": 0.2402, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 52.08430913348946, | |
| "grad_norm": 4.700679302215576, | |
| "learning_rate": 1.3255942537335805e-05, | |
| "loss": 0.2167, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 52.27166276346604, | |
| "grad_norm": 5.628973007202148, | |
| "learning_rate": 1.3092741885039087e-05, | |
| "loss": 0.2298, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 52.459016393442624, | |
| "grad_norm": 5.110661029815674, | |
| "learning_rate": 1.2930194931731382e-05, | |
| "loss": 0.2329, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 52.6463700234192, | |
| "grad_norm": 6.139814853668213, | |
| "learning_rate": 1.2768310601110994e-05, | |
| "loss": 0.2341, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 52.833723653395786, | |
| "grad_norm": 5.745288848876953, | |
| "learning_rate": 1.2607097780498772e-05, | |
| "loss": 0.2203, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 53.021077283372364, | |
| "grad_norm": 4.815990447998047, | |
| "learning_rate": 1.2446565320350185e-05, | |
| "loss": 0.2309, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 53.20843091334895, | |
| "grad_norm": 5.185337543487549, | |
| "learning_rate": 1.2286722033769493e-05, | |
| "loss": 0.2125, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 53.395784543325526, | |
| "grad_norm": 5.8394904136657715, | |
| "learning_rate": 1.2127576696025828e-05, | |
| "loss": 0.2138, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 53.58313817330211, | |
| "grad_norm": 5.652061462402344, | |
| "learning_rate": 1.1969138044071501e-05, | |
| "loss": 0.2351, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 53.77049180327869, | |
| "grad_norm": 5.9499897956848145, | |
| "learning_rate": 1.1811414776062366e-05, | |
| "loss": 0.2328, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 53.95784543325527, | |
| "grad_norm": 5.064215660095215, | |
| "learning_rate": 1.1654415550880243e-05, | |
| "loss": 0.2259, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 54.14519906323185, | |
| "grad_norm": 5.09717321395874, | |
| "learning_rate": 1.149814898765755e-05, | |
| "loss": 0.2289, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 54.332552693208434, | |
| "grad_norm": 6.277153491973877, | |
| "learning_rate": 1.1342623665304209e-05, | |
| "loss": 0.2218, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 54.51990632318501, | |
| "grad_norm": 6.414435863494873, | |
| "learning_rate": 1.1187848122036563e-05, | |
| "loss": 0.2083, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 54.70725995316159, | |
| "grad_norm": 5.3766069412231445, | |
| "learning_rate": 1.1033830854908691e-05, | |
| "loss": 0.2259, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 54.894613583138174, | |
| "grad_norm": 5.645792007446289, | |
| "learning_rate": 1.088058031934592e-05, | |
| "loss": 0.2237, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 55.08196721311475, | |
| "grad_norm": 5.73775053024292, | |
| "learning_rate": 1.0728104928680624e-05, | |
| "loss": 0.2135, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 55.269320843091336, | |
| "grad_norm": 5.055363655090332, | |
| "learning_rate": 1.0576413053690327e-05, | |
| "loss": 0.214, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 55.45667447306791, | |
| "grad_norm": 5.82523775100708, | |
| "learning_rate": 1.0425513022138203e-05, | |
| "loss": 0.2245, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 55.6440281030445, | |
| "grad_norm": 5.7269287109375, | |
| "learning_rate": 1.0275413118315799e-05, | |
| "loss": 0.2157, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 55.831381733021075, | |
| "grad_norm": 5.7951178550720215, | |
| "learning_rate": 1.0126121582588316e-05, | |
| "loss": 0.211, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 56.01873536299766, | |
| "grad_norm": 4.387237548828125, | |
| "learning_rate": 9.977646610942202e-06, | |
| "loss": 0.203, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 56.20608899297424, | |
| "grad_norm": 6.9729485511779785, | |
| "learning_rate": 9.829996354535172e-06, | |
| "loss": 0.199, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 56.39344262295082, | |
| "grad_norm": 5.916283130645752, | |
| "learning_rate": 9.683178919248712e-06, | |
| "loss": 0.2145, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 56.5807962529274, | |
| "grad_norm": 6.08202600479126, | |
| "learning_rate": 9.53720236524313e-06, | |
| "loss": 0.2071, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 56.768149882903984, | |
| "grad_norm": 5.249958515167236, | |
| "learning_rate": 9.392074706515003e-06, | |
| "loss": 0.2208, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 56.95550351288056, | |
| "grad_norm": 5.584343433380127, | |
| "learning_rate": 9.247803910457226e-06, | |
| "loss": 0.2224, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 57.142857142857146, | |
| "grad_norm": 5.786952018737793, | |
| "learning_rate": 9.104397897421623e-06, | |
| "loss": 0.1905, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 57.33021077283372, | |
| "grad_norm": 5.917386054992676, | |
| "learning_rate": 8.96186454028412e-06, | |
| "loss": 0.2157, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 57.51756440281031, | |
| "grad_norm": 7.27882719039917, | |
| "learning_rate": 8.820211664012532e-06, | |
| "loss": 0.2165, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 57.704918032786885, | |
| "grad_norm": 5.488128662109375, | |
| "learning_rate": 8.679447045236962e-06, | |
| "loss": 0.1975, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 57.89227166276346, | |
| "grad_norm": 7.09759521484375, | |
| "learning_rate": 8.539578411822901e-06, | |
| "loss": 0.2117, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 58.07962529274005, | |
| "grad_norm": 5.4233832359313965, | |
| "learning_rate": 8.400613442446948e-06, | |
| "loss": 0.204, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 58.266978922716625, | |
| "grad_norm": 5.012626647949219, | |
| "learning_rate": 8.262559766175254e-06, | |
| "loss": 0.2188, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 58.45433255269321, | |
| "grad_norm": 5.272639751434326, | |
| "learning_rate": 8.125424962044742e-06, | |
| "loss": 0.2095, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 58.64168618266979, | |
| "grad_norm": 5.037263870239258, | |
| "learning_rate": 7.989216558646942e-06, | |
| "loss": 0.2075, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 58.82903981264637, | |
| "grad_norm": 4.957757949829102, | |
| "learning_rate": 7.853942033714736e-06, | |
| "loss": 0.2034, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 59.01639344262295, | |
| "grad_norm": 5.24732780456543, | |
| "learning_rate": 7.719608813711848e-06, | |
| "loss": 0.195, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 59.20374707259953, | |
| "grad_norm": 6.088141441345215, | |
| "learning_rate": 7.586224273425082e-06, | |
| "loss": 0.1972, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 59.39110070257611, | |
| "grad_norm": 6.778179168701172, | |
| "learning_rate": 7.453795735559471e-06, | |
| "loss": 0.2012, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 59.578454332552695, | |
| "grad_norm": 5.891854763031006, | |
| "learning_rate": 7.3223304703363135e-06, | |
| "loss": 0.1973, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 59.76580796252927, | |
| "grad_norm": 5.353849411010742, | |
| "learning_rate": 7.191835695093982e-06, | |
| "loss": 0.2063, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 59.95316159250586, | |
| "grad_norm": 6.351828098297119, | |
| "learning_rate": 7.062318573891716e-06, | |
| "loss": 0.2082, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 60.140515222482435, | |
| "grad_norm": 5.508406162261963, | |
| "learning_rate": 6.933786217116364e-06, | |
| "loss": 0.1927, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 60.32786885245902, | |
| "grad_norm": 5.679818153381348, | |
| "learning_rate": 6.806245681091944e-06, | |
| "loss": 0.2101, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 60.5152224824356, | |
| "grad_norm": 6.534885406494141, | |
| "learning_rate": 6.679703967692322e-06, | |
| "loss": 0.5943, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 60.70257611241218, | |
| "grad_norm": 6.105801582336426, | |
| "learning_rate": 6.5541680239568165e-06, | |
| "loss": 0.2805, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 60.88992974238876, | |
| "grad_norm": 6.5821027755737305, | |
| "learning_rate": 6.429644741708779e-06, | |
| "loss": 0.206, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 61.077283372365336, | |
| "grad_norm": 5.045831203460693, | |
| "learning_rate": 6.3061409571772254e-06, | |
| "loss": 0.1883, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 61.26463700234192, | |
| "grad_norm": 5.163257122039795, | |
| "learning_rate": 6.183663450621607e-06, | |
| "loss": 0.1979, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 61.4519906323185, | |
| "grad_norm": 6.092082500457764, | |
| "learning_rate": 6.062218945959497e-06, | |
| "loss": 0.2002, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 61.63934426229508, | |
| "grad_norm": 5.395636558532715, | |
| "learning_rate": 5.941814110397503e-06, | |
| "loss": 0.2, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 61.82669789227166, | |
| "grad_norm": 5.0518903732299805, | |
| "learning_rate": 5.822455554065217e-06, | |
| "loss": 0.202, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 62.014051522248245, | |
| "grad_norm": 5.086590766906738, | |
| "learning_rate": 5.704149829652342e-06, | |
| "loss": 0.1903, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 62.20140515222482, | |
| "grad_norm": 5.284370422363281, | |
| "learning_rate": 5.586903432048943e-06, | |
| "loss": 0.2081, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 62.38875878220141, | |
| "grad_norm": 5.229597568511963, | |
| "learning_rate": 5.4707227979888834e-06, | |
| "loss": 0.1881, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 62.576112412177984, | |
| "grad_norm": 5.299145698547363, | |
| "learning_rate": 5.355614305696468e-06, | |
| "loss": 0.192, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 62.76346604215457, | |
| "grad_norm": 5.264816761016846, | |
| "learning_rate": 5.241584274536259e-06, | |
| "loss": 0.1923, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 62.950819672131146, | |
| "grad_norm": 5.120273590087891, | |
| "learning_rate": 5.128638964666166e-06, | |
| "loss": 0.1954, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 63.13817330210773, | |
| "grad_norm": 5.393264293670654, | |
| "learning_rate": 5.016784576693781e-06, | |
| "loss": 0.2009, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 63.32552693208431, | |
| "grad_norm": 6.210027694702148, | |
| "learning_rate": 4.906027251335918e-06, | |
| "loss": 0.1969, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 63.51288056206089, | |
| "grad_norm": 4.836513042449951, | |
| "learning_rate": 4.796373069081547e-06, | |
| "loss": 0.189, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 63.70023419203747, | |
| "grad_norm": 5.6751556396484375, | |
| "learning_rate": 4.687828049857967e-06, | |
| "loss": 0.1785, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 63.887587822014055, | |
| "grad_norm": 5.045291423797607, | |
| "learning_rate": 4.580398152700305e-06, | |
| "loss": 0.1895, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 64.07494145199063, | |
| "grad_norm": 5.037576198577881, | |
| "learning_rate": 4.474089275424351e-06, | |
| "loss": 0.1897, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 64.26229508196721, | |
| "grad_norm": 5.322866916656494, | |
| "learning_rate": 4.368907254302837e-06, | |
| "loss": 0.1786, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 64.4496487119438, | |
| "grad_norm": 5.449312210083008, | |
| "learning_rate": 4.264857863744956e-06, | |
| "loss": 0.1931, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 64.63700234192038, | |
| "grad_norm": 5.3389177322387695, | |
| "learning_rate": 4.161946815979403e-06, | |
| "loss": 0.1919, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 64.82435597189695, | |
| "grad_norm": 6.1554789543151855, | |
| "learning_rate": 4.060179760740751e-06, | |
| "loss": 0.1958, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 65.01170960187353, | |
| "grad_norm": 4.999247074127197, | |
| "learning_rate": 3.9595622849593005e-06, | |
| "loss": 0.1955, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 65.19906323185012, | |
| "grad_norm": 5.010692596435547, | |
| "learning_rate": 3.860099912454346e-06, | |
| "loss": 0.1883, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 65.3864168618267, | |
| "grad_norm": 5.899059772491455, | |
| "learning_rate": 3.7617981036309537e-06, | |
| "loss": 0.1997, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 65.57377049180327, | |
| "grad_norm": 5.52278995513916, | |
| "learning_rate": 3.6646622551801345e-06, | |
| "loss": 0.1814, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 65.76112412177986, | |
| "grad_norm": 5.3710618019104, | |
| "learning_rate": 3.568697699782625e-06, | |
| "loss": 0.1923, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 65.94847775175644, | |
| "grad_norm": 5.129755973815918, | |
| "learning_rate": 3.4739097058161114e-06, | |
| "loss": 0.1829, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 66.13583138173303, | |
| "grad_norm": 5.660995960235596, | |
| "learning_rate": 3.3803034770659825e-06, | |
| "loss": 0.19, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 66.3231850117096, | |
| "grad_norm": 5.289727210998535, | |
| "learning_rate": 3.2878841524396465e-06, | |
| "loss": 0.192, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 66.51053864168618, | |
| "grad_norm": 5.043153285980225, | |
| "learning_rate": 3.19665680568445e-06, | |
| "loss": 0.1912, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 66.69789227166277, | |
| "grad_norm": 4.871051788330078, | |
| "learning_rate": 3.1066264451090815e-06, | |
| "loss": 0.1766, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 66.88524590163935, | |
| "grad_norm": 4.7814741134643555, | |
| "learning_rate": 3.0177980133086453e-06, | |
| "loss": 0.1744, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 67.07259953161592, | |
| "grad_norm": 4.574152946472168, | |
| "learning_rate": 2.9301763868933157e-06, | |
| "loss": 0.1908, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 67.2599531615925, | |
| "grad_norm": 4.687925338745117, | |
| "learning_rate": 2.8437663762206163e-06, | |
| "loss": 0.1914, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 67.44730679156909, | |
| "grad_norm": 5.131960868835449, | |
| "learning_rate": 2.75857272513132e-06, | |
| "loss": 0.1844, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 67.63466042154566, | |
| "grad_norm": 6.343697547912598, | |
| "learning_rate": 2.674600110689038e-06, | |
| "loss": 0.1841, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 67.82201405152225, | |
| "grad_norm": 4.7700676918029785, | |
| "learning_rate": 2.5918531429234368e-06, | |
| "loss": 0.1769, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 68.00936768149883, | |
| "grad_norm": 5.645007610321045, | |
| "learning_rate": 2.510336364577154e-06, | |
| "loss": 0.185, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 68.19672131147541, | |
| "grad_norm": 4.07915735244751, | |
| "learning_rate": 2.430054250856412e-06, | |
| "loss": 0.173, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 68.38407494145198, | |
| "grad_norm": 5.163491725921631, | |
| "learning_rate": 2.351011209185336e-06, | |
| "loss": 0.1972, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 68.57142857142857, | |
| "grad_norm": 5.53954553604126, | |
| "learning_rate": 2.2732115789639603e-06, | |
| "loss": 0.1818, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 68.75878220140515, | |
| "grad_norm": 5.660224437713623, | |
| "learning_rate": 2.1966596313300365e-06, | |
| "loss": 0.182, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 68.94613583138174, | |
| "grad_norm": 5.153733730316162, | |
| "learning_rate": 2.1213595689245386e-06, | |
| "loss": 0.1835, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 69.13348946135831, | |
| "grad_norm": 4.928243637084961, | |
| "learning_rate": 2.0473155256609366e-06, | |
| "loss": 0.1839, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 69.3208430913349, | |
| "grad_norm": 4.945793628692627, | |
| "learning_rate": 1.9745315664982276e-06, | |
| "loss": 0.1731, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 69.50819672131148, | |
| "grad_norm": 4.777456760406494, | |
| "learning_rate": 1.9030116872178316e-06, | |
| "loss": 0.1948, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 69.69555035128806, | |
| "grad_norm": 4.628859043121338, | |
| "learning_rate": 1.8327598142041658e-06, | |
| "loss": 0.1935, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 69.88290398126463, | |
| "grad_norm": 6.07702112197876, | |
| "learning_rate": 1.7637798042291126e-06, | |
| "loss": 0.1839, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 70.07025761124122, | |
| "grad_norm": 4.817112922668457, | |
| "learning_rate": 1.6960754442403054e-06, | |
| "loss": 0.1632, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 70.2576112412178, | |
| "grad_norm": 5.194154739379883, | |
| "learning_rate": 1.6296504511531836e-06, | |
| "loss": 0.185, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 70.44496487119437, | |
| "grad_norm": 5.810634613037109, | |
| "learning_rate": 1.5645084716469777e-06, | |
| "loss": 0.1726, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 70.63231850117096, | |
| "grad_norm": 5.319916248321533, | |
| "learning_rate": 1.5006530819644925e-06, | |
| "loss": 0.194, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 70.81967213114754, | |
| "grad_norm": 5.039867877960205, | |
| "learning_rate": 1.4380877877157834e-06, | |
| "loss": 0.1659, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 71.00702576112413, | |
| "grad_norm": 5.392953395843506, | |
| "learning_rate": 1.3768160236856675e-06, | |
| "loss": 0.1863, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 71.1943793911007, | |
| "grad_norm": 4.837090492248535, | |
| "learning_rate": 1.3168411536452152e-06, | |
| "loss": 0.1843, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 71.38173302107728, | |
| "grad_norm": 5.398366928100586, | |
| "learning_rate": 1.2581664701670298e-06, | |
| "loss": 0.1748, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 71.56908665105387, | |
| "grad_norm": 5.430683135986328, | |
| "learning_rate": 1.2007951944445122e-06, | |
| "loss": 0.1855, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 71.75644028103045, | |
| "grad_norm": 4.792428493499756, | |
| "learning_rate": 1.144730476115019e-06, | |
| "loss": 0.1852, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 71.94379391100702, | |
| "grad_norm": 5.0115156173706055, | |
| "learning_rate": 1.0899753930869394e-06, | |
| "loss": 0.173, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 72.1311475409836, | |
| "grad_norm": 5.493814945220947, | |
| "learning_rate": 1.036532951370736e-06, | |
| "loss": 0.1736, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 72.31850117096019, | |
| "grad_norm": 5.309810638427734, | |
| "learning_rate": 9.844060849138997e-07, | |
| "loss": 0.1764, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 72.50585480093677, | |
| "grad_norm": 4.86051082611084, | |
| "learning_rate": 9.335976554398912e-07, | |
| "loss": 0.1895, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 72.69320843091334, | |
| "grad_norm": 5.187326431274414, | |
| "learning_rate": 8.841104522910343e-07, | |
| "loss": 0.1815, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 72.88056206088993, | |
| "grad_norm": 4.355453968048096, | |
| "learning_rate": 8.359471922753715e-07, | |
| "loss": 0.1745, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 73.06791569086651, | |
| "grad_norm": 5.000763416290283, | |
| "learning_rate": 7.891105195175358e-07, | |
| "loss": 0.1838, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 73.2552693208431, | |
| "grad_norm": 5.15885066986084, | |
| "learning_rate": 7.43603005313559e-07, | |
| "loss": 0.1817, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 73.44262295081967, | |
| "grad_norm": 5.528547286987305, | |
| "learning_rate": 6.994271479897314e-07, | |
| "loss": 0.171, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 73.62997658079625, | |
| "grad_norm": 4.402473449707031, | |
| "learning_rate": 6.565853727654503e-07, | |
| "loss": 0.1768, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 73.81733021077284, | |
| "grad_norm": 5.020930290222168, | |
| "learning_rate": 6.150800316200605e-07, | |
| "loss": 0.1995, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 74.00468384074941, | |
| "grad_norm": 5.047281742095947, | |
| "learning_rate": 5.749134031637349e-07, | |
| "loss": 0.1641, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 74.19203747072599, | |
| "grad_norm": 5.803074359893799, | |
| "learning_rate": 5.360876925123992e-07, | |
| "loss": 0.1774, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 74.37939110070258, | |
| "grad_norm": 4.6464924812316895, | |
| "learning_rate": 4.986050311666518e-07, | |
| "loss": 0.175, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 74.56674473067916, | |
| "grad_norm": 4.660820007324219, | |
| "learning_rate": 4.6246747689474847e-07, | |
| "loss": 0.1715, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 74.75409836065573, | |
| "grad_norm": 5.668722629547119, | |
| "learning_rate": 4.2767701361964843e-07, | |
| "loss": 0.1853, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 74.94145199063232, | |
| "grad_norm": 4.63164758682251, | |
| "learning_rate": 3.9423555131007925e-07, | |
| "loss": 0.179, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 75.1288056206089, | |
| "grad_norm": 4.594822406768799, | |
| "learning_rate": 3.6214492587569316e-07, | |
| "loss": 0.1872, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 75.31615925058549, | |
| "grad_norm": 4.460590362548828, | |
| "learning_rate": 3.3140689906628054e-07, | |
| "loss": 0.1671, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 75.50351288056206, | |
| "grad_norm": 5.252081394195557, | |
| "learning_rate": 3.020231583750255e-07, | |
| "loss": 0.1735, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 75.69086651053864, | |
| "grad_norm": 4.645631313323975, | |
| "learning_rate": 2.739953169458992e-07, | |
| "loss": 0.1887, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 75.87822014051523, | |
| "grad_norm": 5.47283411026001, | |
| "learning_rate": 2.473249134850808e-07, | |
| "loss": 0.1755, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 76.06557377049181, | |
| "grad_norm": 4.764420986175537, | |
| "learning_rate": 2.2201341217648331e-07, | |
| "loss": 0.1689, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 76.25292740046838, | |
| "grad_norm": 5.967309951782227, | |
| "learning_rate": 1.9806220260137064e-07, | |
| "loss": 0.169, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 76.44028103044496, | |
| "grad_norm": 4.041045665740967, | |
| "learning_rate": 1.7547259966207708e-07, | |
| "loss": 0.1762, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 76.62763466042155, | |
| "grad_norm": 5.3411030769348145, | |
| "learning_rate": 1.5424584350981487e-07, | |
| "loss": 0.1869, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 76.81498829039813, | |
| "grad_norm": 5.776910781860352, | |
| "learning_rate": 1.343830994765982e-07, | |
| "loss": 0.1865, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 77.0023419203747, | |
| "grad_norm": 4.976191520690918, | |
| "learning_rate": 1.1588545801125838e-07, | |
| "loss": 0.1773, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 77.18969555035129, | |
| "grad_norm": 4.672201156616211, | |
| "learning_rate": 9.87539346195776e-08, | |
| "loss": 0.1874, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 77.37704918032787, | |
| "grad_norm": 4.933514595031738, | |
| "learning_rate": 8.298946980855315e-08, | |
| "loss": 0.1783, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 77.56440281030444, | |
| "grad_norm": 4.814449787139893, | |
| "learning_rate": 6.859292903474702e-08, | |
| "loss": 0.1692, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 77.75175644028103, | |
| "grad_norm": 4.985175132751465, | |
| "learning_rate": 5.5565102656787714e-08, | |
| "loss": 0.1651, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 77.93911007025761, | |
| "grad_norm": 5.773998260498047, | |
| "learning_rate": 4.390670589196622e-08, | |
| "loss": 0.1877, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 78.1264637002342, | |
| "grad_norm": 4.613093852996826, | |
| "learning_rate": 3.361837877698115e-08, | |
| "loss": 0.1777, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 78.31381733021077, | |
| "grad_norm": 5.704171180725098, | |
| "learning_rate": 2.4700686132803076e-08, | |
| "loss": 0.1708, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 78.50117096018735, | |
| "grad_norm": 4.184200763702393, | |
| "learning_rate": 1.715411753365481e-08, | |
| "loss": 0.1852, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 78.68852459016394, | |
| "grad_norm": 4.9230475425720215, | |
| "learning_rate": 1.0979087280141298e-08, | |
| "loss": 0.1659, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 78.87587822014052, | |
| "grad_norm": 5.26874303817749, | |
| "learning_rate": 6.175934376509429e-09, | |
| "loss": 0.1738, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 79.06323185011709, | |
| "grad_norm": 5.470685958862305, | |
| "learning_rate": 2.7449225120268484e-09, | |
| "loss": 0.199, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 79.25058548009368, | |
| "grad_norm": 4.462050437927246, | |
| "learning_rate": 6.862400465157403e-10, | |
| "loss": 0.1715, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 79.43793911007026, | |
| "grad_norm": 5.648906707763672, | |
| "learning_rate": 0.0, | |
| "loss": 0.1815, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 79.43793911007026, | |
| "step": 4240, | |
| "total_flos": 1.1806200422774342e+19, | |
| "train_loss": 0.4446117949373317, | |
| "train_runtime": 54362.4918, | |
| "train_samples_per_second": 6.279, | |
| "train_steps_per_second": 0.078 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 4240, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 80, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.1806200422774342e+19, | |
| "train_batch_size": 10, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
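
A minimal sketch of how a state file like the one above can be inspected, assuming it is saved as plain JSON at a hypothetical path `trainer_state.json` (the usual name written by the Hugging Face `Trainer` next to a checkpoint). It reads `log_history`, drops the final summary record (which carries `train_loss` instead of `loss`), and plots the logged training loss and learning-rate schedule against the global step; the filename and output layout are illustrative, not part of the log itself.

```python
# Sketch: load a Trainer state JSON and plot logged loss / learning rate vs. step.
import json

import matplotlib.pyplot as plt

# Hypothetical path; point this at the trainer_state.json in your checkpoint dir.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step log entries; the final summary record has no "loss"/"learning_rate".
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.suptitle(f"epochs: {state['num_train_epochs']}, max steps: {state['max_steps']}")
fig.tight_layout()
plt.show()
```

For this particular run the plot would show the loss settling around 0.17-0.20 by the final epochs while the cosine-shaped learning rate decays to 0.0 at step 4240.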