{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.87012987012987,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008116883116883116,
      "grad_norm": 9.878928184509277,
      "learning_rate": 3e-06,
      "loss": 0.7789,
      "step": 10
    },
    {
      "epoch": 0.016233766233766232,
      "grad_norm": 2.0204501152038574,
      "learning_rate": 6.333333333333334e-06,
      "loss": 0.448,
      "step": 20
    },
    {
      "epoch": 0.024350649350649352,
      "grad_norm": 1.989099383354187,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.2774,
      "step": 30
    },
    {
      "epoch": 0.032467532467532464,
      "grad_norm": 2.263881206512451,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.2339,
      "step": 40
    },
    {
      "epoch": 0.040584415584415584,
      "grad_norm": 1.8205904960632324,
      "learning_rate": 1.6333333333333335e-05,
      "loss": 0.2169,
      "step": 50
    },
    {
      "epoch": 0.048701298701298704,
      "grad_norm": 0.9809542298316956,
      "learning_rate": 1.9666666666666666e-05,
      "loss": 0.1856,
      "step": 60
    },
    {
      "epoch": 0.056818181818181816,
      "grad_norm": 1.3591933250427246,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 0.1674,
      "step": 70
    },
    {
      "epoch": 0.06493506493506493,
      "grad_norm": 1.483484148979187,
      "learning_rate": 2.633333333333333e-05,
      "loss": 0.1764,
      "step": 80
    },
    {
      "epoch": 0.07305194805194805,
      "grad_norm": 0.9977609515190125,
      "learning_rate": 2.9666666666666672e-05,
      "loss": 0.1497,
      "step": 90
    },
    {
      "epoch": 0.08116883116883117,
      "grad_norm": 1.1285325288772583,
      "learning_rate": 3.3e-05,
      "loss": 0.154,
      "step": 100
    },
    {
      "epoch": 0.08928571428571429,
      "grad_norm": 1.2136211395263672,
      "learning_rate": 3.633333333333333e-05,
      "loss": 0.1436,
      "step": 110
    },
    {
      "epoch": 0.09740259740259741,
      "grad_norm": 1.2199478149414062,
      "learning_rate": 3.966666666666667e-05,
      "loss": 0.1325,
      "step": 120
    },
    {
      "epoch": 0.10551948051948051,
      "grad_norm": 0.7286880016326904,
      "learning_rate": 4.3e-05,
      "loss": 0.1254,
      "step": 130
    },
    {
      "epoch": 0.11363636363636363,
      "grad_norm": 1.066806435585022,
      "learning_rate": 4.633333333333333e-05,
      "loss": 0.1234,
      "step": 140
    },
    {
      "epoch": 0.12175324675324675,
      "grad_norm": 1.0203993320465088,
      "learning_rate": 4.966666666666667e-05,
      "loss": 0.1119,
      "step": 150
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 0.610676109790802,
      "learning_rate": 5.300000000000001e-05,
      "loss": 0.1203,
      "step": 160
    },
    {
      "epoch": 0.137987012987013,
      "grad_norm": 1.528229832649231,
      "learning_rate": 5.633333333333334e-05,
      "loss": 0.1168,
      "step": 170
    },
    {
      "epoch": 0.1461038961038961,
      "grad_norm": 1.8649646043777466,
      "learning_rate": 5.966666666666667e-05,
      "loss": 0.1265,
      "step": 180
    },
    {
      "epoch": 0.15422077922077923,
      "grad_norm": 0.7550538182258606,
      "learning_rate": 6.3e-05,
      "loss": 0.114,
      "step": 190
    },
    {
      "epoch": 0.16233766233766234,
      "grad_norm": 0.9938152432441711,
      "learning_rate": 6.633333333333334e-05,
      "loss": 0.1141,
      "step": 200
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 1.0020602941513062,
      "learning_rate": 6.966666666666668e-05,
      "loss": 0.1241,
      "step": 210
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 0.8025875091552734,
      "learning_rate": 7.3e-05,
      "loss": 0.1023,
      "step": 220
    },
    {
      "epoch": 0.18668831168831168,
      "grad_norm": 0.9334425926208496,
      "learning_rate": 7.633333333333334e-05,
      "loss": 0.0927,
      "step": 230
    },
    {
      "epoch": 0.19480519480519481,
      "grad_norm": 0.9842389225959778,
      "learning_rate": 7.966666666666666e-05,
      "loss": 0.1011,
      "step": 240
    },
    {
      "epoch": 0.20292207792207792,
      "grad_norm": 1.6969271898269653,
      "learning_rate": 8.3e-05,
      "loss": 0.0972,
      "step": 250
    },
    {
      "epoch": 0.21103896103896103,
      "grad_norm": 1.097373127937317,
      "learning_rate": 8.633333333333334e-05,
      "loss": 0.0949,
      "step": 260
    },
    {
      "epoch": 0.21915584415584416,
      "grad_norm": 1.2105375528335571,
      "learning_rate": 8.966666666666666e-05,
      "loss": 0.103,
      "step": 270
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 0.764797568321228,
      "learning_rate": 9.300000000000001e-05,
      "loss": 0.0957,
      "step": 280
    },
    {
      "epoch": 0.2353896103896104,
      "grad_norm": 1.3966500759124756,
      "learning_rate": 9.633333333333335e-05,
      "loss": 0.099,
      "step": 290
    },
    {
      "epoch": 0.2435064935064935,
      "grad_norm": 0.8691798448562622,
      "learning_rate": 9.966666666666667e-05,
      "loss": 0.0871,
      "step": 300
    },
    {
      "epoch": 0.25162337662337664,
      "grad_norm": 0.8653181791305542,
      "learning_rate": 9.999938485971279e-05,
      "loss": 0.094,
      "step": 310
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.867095410823822,
      "learning_rate": 9.999725846827562e-05,
      "loss": 0.081,
      "step": 320
    },
    {
      "epoch": 0.26785714285714285,
      "grad_norm": 0.7629588842391968,
      "learning_rate": 9.999361329594254e-05,
      "loss": 0.081,
      "step": 330
    },
    {
      "epoch": 0.275974025974026,
      "grad_norm": 1.119461178779602,
      "learning_rate": 9.998844945344405e-05,
      "loss": 0.0746,
      "step": 340
    },
    {
      "epoch": 0.2840909090909091,
      "grad_norm": 0.7320038676261902,
      "learning_rate": 9.99817670976436e-05,
      "loss": 0.0798,
      "step": 350
    },
    {
      "epoch": 0.2922077922077922,
      "grad_norm": 0.8777246475219727,
      "learning_rate": 9.997356643153303e-05,
      "loss": 0.0713,
      "step": 360
    },
    {
      "epoch": 0.3003246753246753,
      "grad_norm": 0.7135207653045654,
      "learning_rate": 9.996384770422629e-05,
      "loss": 0.0754,
      "step": 370
    },
    {
      "epoch": 0.30844155844155846,
      "grad_norm": 0.739890456199646,
      "learning_rate": 9.995261121095194e-05,
      "loss": 0.0725,
      "step": 380
    },
    {
      "epoch": 0.31655844155844154,
      "grad_norm": 0.720205545425415,
      "learning_rate": 9.993985729304408e-05,
      "loss": 0.0734,
      "step": 390
    },
    {
      "epoch": 0.3246753246753247,
      "grad_norm": 0.7397477626800537,
      "learning_rate": 9.992558633793212e-05,
      "loss": 0.0713,
      "step": 400
    },
    {
      "epoch": 0.3327922077922078,
      "grad_norm": 0.46367812156677246,
      "learning_rate": 9.990979877912891e-05,
      "loss": 0.0742,
      "step": 410
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.6461692452430725,
      "learning_rate": 9.989249509621759e-05,
      "loss": 0.0634,
      "step": 420
    },
    {
      "epoch": 0.349025974025974,
      "grad_norm": 0.5224289894104004,
      "learning_rate": 9.987367581483705e-05,
      "loss": 0.0647,
      "step": 430
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.4992559552192688,
      "learning_rate": 9.985334150666592e-05,
      "loss": 0.0635,
      "step": 440
    },
    {
      "epoch": 0.3652597402597403,
      "grad_norm": 0.728114902973175,
      "learning_rate": 9.983149278940526e-05,
      "loss": 0.0632,
      "step": 450
    },
    {
      "epoch": 0.37337662337662336,
      "grad_norm": 0.6755104660987854,
      "learning_rate": 9.980813032675974e-05,
      "loss": 0.0696,
      "step": 460
    },
    {
      "epoch": 0.3814935064935065,
      "grad_norm": 0.4159291982650757,
      "learning_rate": 9.978325482841753e-05,
      "loss": 0.0676,
      "step": 470
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 0.5511267185211182,
      "learning_rate": 9.975686705002867e-05,
      "loss": 0.0654,
      "step": 480
    },
    {
      "epoch": 0.3977272727272727,
      "grad_norm": 0.6259632110595703,
      "learning_rate": 9.972896779318219e-05,
      "loss": 0.0625,
      "step": 490
    },
    {
      "epoch": 0.40584415584415584,
      "grad_norm": 0.6669071316719055,
      "learning_rate": 9.969955790538175e-05,
      "loss": 0.0609,
      "step": 500
    },
    {
      "epoch": 0.413961038961039,
      "grad_norm": 0.4925576150417328,
      "learning_rate": 9.966863828001982e-05,
      "loss": 0.0618,
      "step": 510
    },
    {
      "epoch": 0.42207792207792205,
      "grad_norm": 0.513395369052887,
      "learning_rate": 9.963620985635065e-05,
      "loss": 0.0606,
      "step": 520
    },
    {
      "epoch": 0.4301948051948052,
      "grad_norm": 0.4852656424045563,
      "learning_rate": 9.960227361946164e-05,
      "loss": 0.0599,
      "step": 530
    },
    {
      "epoch": 0.4383116883116883,
      "grad_norm": 1.118783950805664,
      "learning_rate": 9.95668306002435e-05,
      "loss": 0.06,
      "step": 540
    },
    {
      "epoch": 0.44642857142857145,
      "grad_norm": 0.5577962398529053,
      "learning_rate": 9.952988187535886e-05,
      "loss": 0.056,
      "step": 550
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.9623023271560669,
      "learning_rate": 9.949142856720961e-05,
      "loss": 0.0582,
      "step": 560
    },
    {
      "epoch": 0.46266233766233766,
      "grad_norm": 0.5731148719787598,
      "learning_rate": 9.945147184390278e-05,
      "loss": 0.0515,
      "step": 570
    },
    {
      "epoch": 0.4707792207792208,
      "grad_norm": 0.5436367392539978,
      "learning_rate": 9.941001291921512e-05,
      "loss": 0.0562,
      "step": 580
    },
    {
      "epoch": 0.4788961038961039,
      "grad_norm": 0.5028620958328247,
      "learning_rate": 9.936705305255612e-05,
      "loss": 0.0587,
      "step": 590
    },
    {
      "epoch": 0.487012987012987,
      "grad_norm": 0.6722061038017273,
      "learning_rate": 9.932259354892984e-05,
      "loss": 0.0621,
      "step": 600
    },
    {
      "epoch": 0.49512987012987014,
      "grad_norm": 0.4234634339809418,
      "learning_rate": 9.927663575889521e-05,
      "loss": 0.0656,
      "step": 610
    },
    {
      "epoch": 0.5032467532467533,
      "grad_norm": 0.4400225877761841,
      "learning_rate": 9.922918107852504e-05,
      "loss": 0.0575,
      "step": 620
    },
    {
      "epoch": 0.5113636363636364,
      "grad_norm": 0.5401639342308044,
      "learning_rate": 9.918023094936363e-05,
      "loss": 0.0534,
      "step": 630
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.471709281206131,
      "learning_rate": 9.912978685838294e-05,
      "loss": 0.0524,
      "step": 640
    },
    {
      "epoch": 0.5275974025974026,
      "grad_norm": 0.6473276615142822,
      "learning_rate": 9.90778503379374e-05,
      "loss": 0.0559,
      "step": 650
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 0.7314488887786865,
      "learning_rate": 9.902442296571743e-05,
      "loss": 0.0582,
      "step": 660
    },
    {
      "epoch": 0.5438311688311688,
      "grad_norm": 0.528820276260376,
      "learning_rate": 9.896950636470147e-05,
      "loss": 0.0624,
      "step": 670
    },
    {
      "epoch": 0.551948051948052,
      "grad_norm": 0.5930554866790771,
      "learning_rate": 9.891310220310666e-05,
      "loss": 0.0617,
      "step": 680
    },
    {
      "epoch": 0.560064935064935,
      "grad_norm": 0.5521166324615479,
      "learning_rate": 9.885521219433823e-05,
      "loss": 0.0552,
      "step": 690
    },
    {
      "epoch": 0.5681818181818182,
      "grad_norm": 0.5110708475112915,
      "learning_rate": 9.879583809693738e-05,
      "loss": 0.0561,
      "step": 700
    },
    {
      "epoch": 0.5762987012987013,
      "grad_norm": 0.5363431572914124,
      "learning_rate": 9.873498171452789e-05,
      "loss": 0.0667,
      "step": 710
    },
    {
      "epoch": 0.5844155844155844,
      "grad_norm": 0.4858008921146393,
      "learning_rate": 9.867264489576135e-05,
      "loss": 0.0526,
      "step": 720
    },
    {
      "epoch": 0.5925324675324676,
      "grad_norm": 0.49196329712867737,
      "learning_rate": 9.860882953426099e-05,
      "loss": 0.0561,
      "step": 730
    },
    {
      "epoch": 0.6006493506493507,
      "grad_norm": 0.42068248987197876,
      "learning_rate": 9.854353756856412e-05,
      "loss": 0.048,
      "step": 740
    },
    {
      "epoch": 0.6087662337662337,
      "grad_norm": 0.6270432472229004,
      "learning_rate": 9.847677098206332e-05,
      "loss": 0.0525,
      "step": 750
    },
    {
      "epoch": 0.6168831168831169,
      "grad_norm": 0.6490077376365662,
      "learning_rate": 9.840853180294608e-05,
      "loss": 0.0517,
      "step": 760
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.3505368232727051,
      "learning_rate": 9.833882210413332e-05,
      "loss": 0.0495,
      "step": 770
    },
    {
      "epoch": 0.6331168831168831,
      "grad_norm": 0.4244323968887329,
      "learning_rate": 9.826764400321633e-05,
      "loss": 0.0501,
      "step": 780
    },
    {
      "epoch": 0.6412337662337663,
      "grad_norm": 0.6171508431434631,
      "learning_rate": 9.819499966239243e-05,
      "loss": 0.0521,
      "step": 790
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 0.49943363666534424,
      "learning_rate": 9.812089128839938e-05,
      "loss": 0.0591,
      "step": 800
    },
    {
      "epoch": 0.6574675324675324,
      "grad_norm": 0.3718108534812927,
      "learning_rate": 9.804532113244828e-05,
      "loss": 0.0552,
      "step": 810
    },
    {
      "epoch": 0.6655844155844156,
      "grad_norm": 0.4114764928817749,
      "learning_rate": 9.796829149015517e-05,
      "loss": 0.0519,
      "step": 820
    },
    {
      "epoch": 0.6737012987012987,
      "grad_norm": 0.794797956943512,
      "learning_rate": 9.788980470147132e-05,
      "loss": 0.048,
      "step": 830
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.5907167792320251,
      "learning_rate": 9.780986315061218e-05,
      "loss": 0.0468,
      "step": 840
    },
    {
      "epoch": 0.689935064935065,
      "grad_norm": 0.40095576643943787,
      "learning_rate": 9.772846926598491e-05,
      "loss": 0.0544,
      "step": 850
    },
    {
      "epoch": 0.698051948051948,
      "grad_norm": 0.6045254468917847,
      "learning_rate": 9.76456255201146e-05,
      "loss": 0.0527,
      "step": 860
    },
    {
      "epoch": 0.7061688311688312,
      "grad_norm": 0.33633312582969666,
      "learning_rate": 9.756133442956923e-05,
      "loss": 0.0512,
      "step": 870
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.5168439149856567,
      "learning_rate": 9.747559855488313e-05,
      "loss": 0.043,
      "step": 880
    },
    {
      "epoch": 0.7224025974025974,
      "grad_norm": 0.5217397212982178,
      "learning_rate": 9.73884205004793e-05,
      "loss": 0.0478,
      "step": 890
    },
    {
      "epoch": 0.7305194805194806,
      "grad_norm": 0.4128414988517761,
      "learning_rate": 9.729980291459019e-05,
      "loss": 0.054,
      "step": 900
    },
    {
      "epoch": 0.7386363636363636,
      "grad_norm": 0.6482959985733032,
      "learning_rate": 9.720974848917735e-05,
      "loss": 0.0456,
      "step": 910
    },
    {
      "epoch": 0.7467532467532467,
      "grad_norm": 0.6063295602798462,
      "learning_rate": 9.711825995984957e-05,
      "loss": 0.0499,
      "step": 920
    },
    {
      "epoch": 0.7548701298701299,
      "grad_norm": 0.6088147163391113,
      "learning_rate": 9.702534010577991e-05,
      "loss": 0.0456,
      "step": 930
    },
    {
      "epoch": 0.762987012987013,
      "grad_norm": 0.6890689730644226,
      "learning_rate": 9.693099174962103e-05,
      "loss": 0.0439,
      "step": 940
    },
    {
      "epoch": 0.7711038961038961,
      "grad_norm": 0.5384606122970581,
      "learning_rate": 9.683521775741977e-05,
      "loss": 0.0548,
      "step": 950
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.7219799160957336,
      "learning_rate": 9.673802103852979e-05,
      "loss": 0.0467,
      "step": 960
    },
    {
      "epoch": 0.7873376623376623,
      "grad_norm": 0.5432652831077576,
      "learning_rate": 9.663940454552342e-05,
      "loss": 0.0479,
      "step": 970
    },
    {
      "epoch": 0.7954545454545454,
      "grad_norm": 0.5412060618400574,
      "learning_rate": 9.65393712741018e-05,
      "loss": 0.0552,
      "step": 980
    },
    {
      "epoch": 0.8035714285714286,
      "grad_norm": 0.506218433380127,
      "learning_rate": 9.6437924263004e-05,
      "loss": 0.0517,
      "step": 990
    },
    {
      "epoch": 0.8116883116883117,
      "grad_norm": 0.4060725271701813,
      "learning_rate": 9.63350665939146e-05,
      "loss": 0.0486,
      "step": 1000
    },
    {
      "epoch": 0.8198051948051948,
      "grad_norm": 0.5219970941543579,
      "learning_rate": 9.623080139137023e-05,
      "loss": 0.0441,
      "step": 1010
    },
    {
      "epoch": 0.827922077922078,
      "grad_norm": 0.6814868450164795,
      "learning_rate": 9.612513182266447e-05,
      "loss": 0.0437,
      "step": 1020
    },
    {
      "epoch": 0.836038961038961,
      "grad_norm": 0.5724406242370605,
      "learning_rate": 9.601806109775179e-05,
      "loss": 0.0445,
      "step": 1030
    },
    {
      "epoch": 0.8441558441558441,
      "grad_norm": 0.5323967337608337,
      "learning_rate": 9.590959246914995e-05,
      "loss": 0.0492,
      "step": 1040
    },
    {
      "epoch": 0.8522727272727273,
      "grad_norm": 0.4202612340450287,
      "learning_rate": 9.579972923184122e-05,
      "loss": 0.0404,
      "step": 1050
    },
    {
      "epoch": 0.8603896103896104,
      "grad_norm": 0.5512884259223938,
      "learning_rate": 9.568847472317232e-05,
      "loss": 0.0521,
      "step": 1060
    },
    {
      "epoch": 0.8685064935064936,
      "grad_norm": 0.326740562915802,
      "learning_rate": 9.557583232275303e-05,
      "loss": 0.0513,
      "step": 1070
    },
    {
      "epoch": 0.8766233766233766,
      "grad_norm": 0.49678659439086914,
      "learning_rate": 9.546180545235344e-05,
      "loss": 0.0458,
      "step": 1080
    },
    {
      "epoch": 0.8847402597402597,
      "grad_norm": 0.4488849937915802,
      "learning_rate": 9.534639757580013e-05,
      "loss": 0.0361,
      "step": 1090
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 0.3409590721130371,
      "learning_rate": 9.522961219887092e-05,
      "loss": 0.041,
      "step": 1100
    },
    {
      "epoch": 0.900974025974026,
      "grad_norm": 0.3857138156890869,
      "learning_rate": 9.511145286918828e-05,
      "loss": 0.044,
      "step": 1110
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.48027417063713074,
      "learning_rate": 9.499192317611167e-05,
      "loss": 0.0438,
      "step": 1120
    },
    {
      "epoch": 0.9172077922077922,
      "grad_norm": 0.26468542218208313,
      "learning_rate": 9.487102675062851e-05,
      "loss": 0.0495,
      "step": 1130
    },
    {
      "epoch": 0.9253246753246753,
      "grad_norm": 0.5093517899513245,
      "learning_rate": 9.474876726524374e-05,
      "loss": 0.0468,
      "step": 1140
    },
    {
      "epoch": 0.9334415584415584,
      "grad_norm": 0.4471321105957031,
      "learning_rate": 9.462514843386845e-05,
      "loss": 0.0451,
      "step": 1150
    },
    {
      "epoch": 0.9415584415584416,
      "grad_norm": 0.3578501343727112,
      "learning_rate": 9.450017401170689e-05,
      "loss": 0.0404,
      "step": 1160
    },
    {
      "epoch": 0.9496753246753247,
      "grad_norm": 0.46056661009788513,
      "learning_rate": 9.437384779514256e-05,
      "loss": 0.0419,
      "step": 1170
    },
    {
      "epoch": 0.9577922077922078,
      "grad_norm": 0.4485381841659546,
      "learning_rate": 9.424617362162271e-05,
      "loss": 0.0435,
      "step": 1180
    },
    {
      "epoch": 0.9659090909090909,
      "grad_norm": 0.4065086245536804,
      "learning_rate": 9.411715536954196e-05,
      "loss": 0.0422,
      "step": 1190
    },
    {
      "epoch": 0.974025974025974,
      "grad_norm": 0.49561673402786255,
      "learning_rate": 9.39867969581243e-05,
      "loss": 0.0435,
      "step": 1200
    },
    {
      "epoch": 0.9821428571428571,
      "grad_norm": 0.3293704390525818,
      "learning_rate": 9.385510234730415e-05,
      "loss": 0.044,
      "step": 1210
    },
    {
      "epoch": 0.9902597402597403,
      "grad_norm": 0.5016496181488037,
      "learning_rate": 9.372207553760603e-05,
      "loss": 0.0367,
      "step": 1220
    },
    {
      "epoch": 0.9983766233766234,
      "grad_norm": 0.5729497075080872,
      "learning_rate": 9.358772057002312e-05,
      "loss": 0.0478,
      "step": 1230
    },
    {
      "epoch": 1.0064935064935066,
      "grad_norm": 0.6327930092811584,
      "learning_rate": 9.345204152589428e-05,
      "loss": 0.0412,
      "step": 1240
    },
    {
      "epoch": 1.0146103896103895,
      "grad_norm": 0.40651750564575195,
      "learning_rate": 9.331504252678037e-05,
      "loss": 0.0459,
      "step": 1250
    },
    {
      "epoch": 1.0227272727272727,
      "grad_norm": 0.5167247653007507,
      "learning_rate": 9.317672773433876e-05,
      "loss": 0.0454,
      "step": 1260
    },
    {
      "epoch": 1.030844155844156,
      "grad_norm": 0.48275452852249146,
      "learning_rate": 9.30371013501972e-05,
      "loss": 0.0459,
      "step": 1270
    },
    {
      "epoch": 1.0389610389610389,
      "grad_norm": 0.48492079973220825,
      "learning_rate": 9.289616761582587e-05,
      "loss": 0.0391,
      "step": 1280
    },
    {
      "epoch": 1.047077922077922,
      "grad_norm": 0.5485243797302246,
      "learning_rate": 9.275393081240882e-05,
      "loss": 0.0525,
      "step": 1290
    },
    {
      "epoch": 1.0551948051948052,
      "grad_norm": 0.5031293034553528,
      "learning_rate": 9.261039526071374e-05,
      "loss": 0.0462,
      "step": 1300
    },
    {
      "epoch": 1.0633116883116882,
      "grad_norm": 0.5791725516319275,
      "learning_rate": 9.246556532096078e-05,
      "loss": 0.0483,
      "step": 1310
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.4918902814388275,
      "learning_rate": 9.231944539269009e-05,
      "loss": 0.0463,
      "step": 1320
    },
    {
      "epoch": 1.0795454545454546,
      "grad_norm": 0.5383262634277344,
      "learning_rate": 9.217203991462815e-05,
      "loss": 0.0408,
      "step": 1330
    },
    {
      "epoch": 1.0876623376623376,
      "grad_norm": 0.6195541620254517,
      "learning_rate": 9.202335336455296e-05,
      "loss": 0.0415,
      "step": 1340
    },
    {
      "epoch": 1.0957792207792207,
      "grad_norm": 0.3492124080657959,
      "learning_rate": 9.187339025915802e-05,
      "loss": 0.0436,
      "step": 1350
    },
    {
      "epoch": 1.103896103896104,
      "grad_norm": 0.3001192510128021,
      "learning_rate": 9.17221551539151e-05,
      "loss": 0.0406,
      "step": 1360
    },
    {
      "epoch": 1.112012987012987,
      "grad_norm": 0.5313782095909119,
      "learning_rate": 9.156965264293586e-05,
      "loss": 0.0306,
      "step": 1370
    },
    {
      "epoch": 1.12012987012987,
      "grad_norm": 0.35381996631622314,
      "learning_rate": 9.141588735883232e-05,
      "loss": 0.0381,
      "step": 1380
    },
    {
      "epoch": 1.1282467532467533,
      "grad_norm": 0.2853216826915741,
      "learning_rate": 9.126086397257612e-05,
      "loss": 0.0366,
      "step": 1390
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.2972594201564789,
      "learning_rate": 9.110458719335659e-05,
      "loss": 0.039,
      "step": 1400
    },
    {
      "epoch": 1.1444805194805194,
      "grad_norm": 0.31505510210990906,
      "learning_rate": 9.094706176843777e-05,
      "loss": 0.0353,
      "step": 1410
    },
    {
      "epoch": 1.1525974025974026,
      "grad_norm": 0.41417738795280457,
      "learning_rate": 9.078829248301417e-05,
      "loss": 0.0424,
      "step": 1420
    },
    {
      "epoch": 1.1607142857142858,
      "grad_norm": 0.2667694687843323,
      "learning_rate": 9.062828416006539e-05,
      "loss": 0.0395,
      "step": 1430
    },
    {
      "epoch": 1.1688311688311688,
      "grad_norm": 0.3468317687511444,
      "learning_rate": 9.046704166020961e-05,
      "loss": 0.0395,
      "step": 1440
    },
    {
      "epoch": 1.176948051948052,
      "grad_norm": 0.4519212245941162,
      "learning_rate": 9.030456988155596e-05,
      "loss": 0.0421,
      "step": 1450
    },
    {
      "epoch": 1.1850649350649352,
      "grad_norm": 0.3515416979789734,
      "learning_rate": 9.014087375955573e-05,
      "loss": 0.0356,
      "step": 1460
    },
    {
      "epoch": 1.1931818181818181,
      "grad_norm": 0.42163556814193726,
      "learning_rate": 8.997595826685243e-05,
      "loss": 0.0387,
      "step": 1470
    },
    {
      "epoch": 1.2012987012987013,
      "grad_norm": 0.3507428467273712,
      "learning_rate": 8.980982841313074e-05,
      "loss": 0.0445,
      "step": 1480
    },
    {
      "epoch": 1.2094155844155845,
      "grad_norm": 0.45810309052467346,
      "learning_rate": 8.964248924496435e-05,
      "loss": 0.0336,
      "step": 1490
    },
    {
      "epoch": 1.2175324675324675,
      "grad_norm": 0.2804049253463745,
      "learning_rate": 8.947394584566258e-05,
      "loss": 0.0415,
      "step": 1500
    },
    {
      "epoch": 1.2256493506493507,
      "grad_norm": 0.2912737727165222,
      "learning_rate": 8.930420333511606e-05,
      "loss": 0.0454,
      "step": 1510
    },
    {
      "epoch": 1.2337662337662338,
      "grad_norm": 0.3981313705444336,
      "learning_rate": 8.913326686964117e-05,
      "loss": 0.0377,
      "step": 1520
    },
    {
      "epoch": 1.2418831168831168,
      "grad_norm": 0.33030176162719727,
      "learning_rate": 8.89611416418234e-05,
      "loss": 0.0449,
      "step": 1530
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.47622916102409363,
      "learning_rate": 8.878783288035957e-05,
      "loss": 0.0462,
      "step": 1540
    },
    {
      "epoch": 1.2581168831168832,
      "grad_norm": 0.3368417024612427,
      "learning_rate": 8.86133458498991e-05,
      "loss": 0.0454,
      "step": 1550
    },
    {
      "epoch": 1.2662337662337662,
      "grad_norm": 0.4337632358074188,
      "learning_rate": 8.843768585088393e-05,
      "loss": 0.0376,
      "step": 1560
    },
    {
      "epoch": 1.2743506493506493,
      "grad_norm": 0.37321558594703674,
      "learning_rate": 8.82608582193877e-05,
      "loss": 0.0357,
      "step": 1570
    },
    {
      "epoch": 1.2824675324675325,
      "grad_norm": 0.4496852457523346,
      "learning_rate": 8.80828683269535e-05,
      "loss": 0.04,
      "step": 1580
    },
    {
      "epoch": 1.2905844155844157,
      "grad_norm": 0.3610592782497406,
      "learning_rate": 8.790372158043074e-05,
      "loss": 0.0394,
      "step": 1590
    },
    {
      "epoch": 1.2987012987012987,
      "grad_norm": 0.4939509928226471,
      "learning_rate": 8.772342342181095e-05,
      "loss": 0.0389,
      "step": 1600
    },
    {
      "epoch": 1.3068181818181819,
      "grad_norm": 0.32088860869407654,
      "learning_rate": 8.75419793280624e-05,
      "loss": 0.0376,
      "step": 1610
    },
    {
      "epoch": 1.314935064935065,
      "grad_norm": 0.24955856800079346,
      "learning_rate": 8.735939481096378e-05,
      "loss": 0.0337,
      "step": 1620
    },
    {
      "epoch": 1.323051948051948,
      "grad_norm": 0.2986941635608673,
      "learning_rate": 8.717567541693673e-05,
      "loss": 0.0356,
      "step": 1630
    },
    {
      "epoch": 1.3311688311688312,
      "grad_norm": 0.6089693903923035,
      "learning_rate": 8.699082672687734e-05,
      "loss": 0.0382,
      "step": 1640
    },
    {
      "epoch": 1.3392857142857144,
      "grad_norm": 0.465394526720047,
      "learning_rate": 8.680485435598673e-05,
      "loss": 0.0462,
      "step": 1650
    },
    {
      "epoch": 1.3474025974025974,
      "grad_norm": 0.2362728714942932,
      "learning_rate": 8.661776395360029e-05,
      "loss": 0.0305,
      "step": 1660
    },
    {
      "epoch": 1.3555194805194806,
      "grad_norm": 0.32173094153404236,
      "learning_rate": 8.642956120301626e-05,
      "loss": 0.0367,
      "step": 1670
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.47564932703971863,
      "learning_rate": 8.624025182132292e-05,
      "loss": 0.0421,
      "step": 1680
    },
    {
      "epoch": 1.3717532467532467,
      "grad_norm": 0.2202228456735611,
      "learning_rate": 8.604984155922506e-05,
      "loss": 0.0399,
      "step": 1690
    },
    {
      "epoch": 1.37987012987013,
      "grad_norm": 0.21159934997558594,
      "learning_rate": 8.585833620086918e-05,
      "loss": 0.0326,
      "step": 1700
    },
    {
      "epoch": 1.387987012987013,
      "grad_norm": 0.46340233087539673,
      "learning_rate": 8.566574156366784e-05,
      "loss": 0.0377,
      "step": 1710
    },
    {
      "epoch": 1.396103896103896,
      "grad_norm": 0.44417804479599,
      "learning_rate": 8.547206349812298e-05,
      "loss": 0.0399,
      "step": 1720
    },
    {
      "epoch": 1.4042207792207793,
      "grad_norm": 0.4538224935531616,
      "learning_rate": 8.527730788764805e-05,
      "loss": 0.0356,
      "step": 1730
    },
    {
      "epoch": 1.4123376623376624,
      "grad_norm": 0.45235779881477356,
      "learning_rate": 8.508148064838948e-05,
      "loss": 0.0358,
      "step": 1740
    },
    {
      "epoch": 1.4204545454545454,
      "grad_norm": 0.32103490829467773,
      "learning_rate": 8.488458772904684e-05,
      "loss": 0.0386,
      "step": 1750
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.43833354115486145,
      "learning_rate": 8.468663511069217e-05,
      "loss": 0.0429,
      "step": 1760
    },
    {
      "epoch": 1.4366883116883118,
      "grad_norm": 0.4180199205875397,
      "learning_rate": 8.448762880658825e-05,
      "loss": 0.0369,
      "step": 1770
    },
    {
      "epoch": 1.4448051948051948,
      "grad_norm": 0.41014060378074646,
      "learning_rate": 8.428757486200603e-05,
      "loss": 0.0412,
      "step": 1780
    },
    {
      "epoch": 1.452922077922078,
      "grad_norm": 0.3040854334831238,
      "learning_rate": 8.40864793540409e-05,
      "loss": 0.0412,
      "step": 1790
    },
    {
      "epoch": 1.4610389610389611,
      "grad_norm": 0.28535085916519165,
      "learning_rate": 8.388434839142813e-05,
      "loss": 0.0391,
      "step": 1800
    },
    {
      "epoch": 1.469155844155844,
      "grad_norm": 0.5234016180038452,
      "learning_rate": 8.368118811435726e-05,
      "loss": 0.0395,
      "step": 1810
    },
    {
      "epoch": 1.4772727272727273,
      "grad_norm": 0.5190858840942383,
      "learning_rate": 8.347700469428564e-05,
      "loss": 0.0356,
      "step": 1820
    },
    {
      "epoch": 1.4853896103896105,
      "grad_norm": 0.2170562446117401,
      "learning_rate": 8.327180433375091e-05,
      "loss": 0.0373,
      "step": 1830
    },
    {
      "epoch": 1.4935064935064934,
      "grad_norm": 0.3450142741203308,
      "learning_rate": 8.306559326618259e-05,
      "loss": 0.0358,
      "step": 1840
    },
    {
      "epoch": 1.5016233766233766,
      "grad_norm": 0.49485528469085693,
      "learning_rate": 8.285837775571276e-05,
      "loss": 0.0343,
      "step": 1850
    },
    {
      "epoch": 1.5097402597402598,
      "grad_norm": 0.25820446014404297,
      "learning_rate": 8.265016409698573e-05,
      "loss": 0.0391,
      "step": 1860
    },
    {
      "epoch": 1.5178571428571428,
      "grad_norm": 0.37337490916252136,
      "learning_rate": 8.244095861496686e-05,
      "loss": 0.0367,
      "step": 1870
    },
    {
      "epoch": 1.525974025974026,
      "grad_norm": 0.3256986141204834,
      "learning_rate": 8.223076766475035e-05,
      "loss": 0.0328,
      "step": 1880
    },
    {
      "epoch": 1.5340909090909092,
      "grad_norm": 0.3565908968448639,
      "learning_rate": 8.201959763136633e-05,
      "loss": 0.0291,
      "step": 1890
    },
    {
      "epoch": 1.5422077922077921,
      "grad_norm": 0.21973878145217896,
      "learning_rate": 8.180745492958674e-05,
      "loss": 0.039,
      "step": 1900
    },
    {
      "epoch": 1.5503246753246753,
      "grad_norm": 0.6989667415618896,
      "learning_rate": 8.159434600373061e-05,
      "loss": 0.0411,
      "step": 1910
    },
    {
      "epoch": 1.5584415584415585,
      "grad_norm": 0.41696980595588684,
      "learning_rate": 8.138027732746818e-05,
      "loss": 0.0421,
      "step": 1920
    },
    {
      "epoch": 1.5665584415584415,
      "grad_norm": 0.46307626366615295,
      "learning_rate": 8.116525540362434e-05,
      "loss": 0.0376,
      "step": 1930
    },
    {
      "epoch": 1.5746753246753247,
      "grad_norm": 0.4556179940700531,
      "learning_rate": 8.094928676398101e-05,
      "loss": 0.0396,
      "step": 1940
    },
    {
      "epoch": 1.5827922077922079,
      "grad_norm": 0.2597368657588959,
      "learning_rate": 8.073237796907882e-05,
      "loss": 0.0345,
      "step": 1950
    },
    {
      "epoch": 1.5909090909090908,
      "grad_norm": 0.31811609864234924,
      "learning_rate": 8.051453560801772e-05,
      "loss": 0.032,
      "step": 1960
    },
    {
      "epoch": 1.599025974025974,
      "grad_norm": 0.2722020447254181,
      "learning_rate": 8.029576629825687e-05,
      "loss": 0.0388,
      "step": 1970
    },
    {
      "epoch": 1.6071428571428572,
      "grad_norm": 0.3664605915546417,
      "learning_rate": 8.007607668541362e-05,
      "loss": 0.0389,
      "step": 1980
    },
    {
      "epoch": 1.6152597402597402,
      "grad_norm": 0.4249238967895508,
      "learning_rate": 7.985547344306161e-05,
      "loss": 0.0359,
      "step": 1990
    },
    {
      "epoch": 1.6233766233766234,
      "grad_norm": 0.27410855889320374,
      "learning_rate": 7.963396327252812e-05,
      "loss": 0.0337,
      "step": 2000
    },
    {
      "epoch": 1.6314935064935066,
      "grad_norm": 0.5478296875953674,
      "learning_rate": 7.941155290269038e-05,
      "loss": 0.0314,
      "step": 2010
    },
    {
      "epoch": 1.6396103896103895,
      "grad_norm": 0.3749033212661743,
      "learning_rate": 7.918824908977123e-05,
      "loss": 0.0356,
      "step": 2020
    },
    {
      "epoch": 1.6477272727272727,
      "grad_norm": 0.48101192712783813,
      "learning_rate": 7.896405861713394e-05,
      "loss": 0.0407,
      "step": 2030
    },
    {
      "epoch": 1.655844155844156,
      "grad_norm": 0.3538611829280853,
      "learning_rate": 7.873898829507606e-05,
      "loss": 0.0399,
      "step": 2040
    },
    {
      "epoch": 1.6639610389610389,
      "grad_norm": 0.4626486599445343,
      "learning_rate": 7.851304496062254e-05,
      "loss": 0.0346,
      "step": 2050
    },
    {
      "epoch": 1.672077922077922,
      "grad_norm": 0.41147440671920776,
      "learning_rate": 7.828623547731818e-05,
      "loss": 0.0315,
      "step": 2060
    },
    {
      "epoch": 1.6801948051948052,
      "grad_norm": 0.28702419996261597,
      "learning_rate": 7.80585667350189e-05,
      "loss": 0.0299,
      "step": 2070
    },
    {
      "epoch": 1.6883116883116882,
      "grad_norm": 0.3159998059272766,
      "learning_rate": 7.783004564968263e-05,
      "loss": 0.0334,
      "step": 2080
    },
    {
      "epoch": 1.6964285714285714,
      "grad_norm": 0.40764641761779785,
      "learning_rate": 7.760067916315921e-05,
      "loss": 0.0307,
      "step": 2090
    },
    {
      "epoch": 1.7045454545454546,
      "grad_norm": 0.314028263092041,
      "learning_rate": 7.737047424297941e-05,
      "loss": 0.0283,
      "step": 2100
    },
    {
      "epoch": 1.7126623376623376,
      "grad_norm": 0.32912203669548035,
      "learning_rate": 7.713943788214337e-05,
      "loss": 0.0339,
      "step": 2110
    },
    {
      "epoch": 1.7207792207792207,
      "grad_norm": 0.37462761998176575,
      "learning_rate": 7.690757709890812e-05,
      "loss": 0.0285,
      "step": 2120
    },
    {
      "epoch": 1.728896103896104,
      "grad_norm": 0.2835879921913147,
      "learning_rate": 7.66748989365744e-05,
      "loss": 0.0335,
      "step": 2130
    },
    {
      "epoch": 1.737012987012987,
      "grad_norm": 0.23292797803878784,
      "learning_rate": 7.644141046327271e-05,
      "loss": 0.032,
      "step": 2140
    },
    {
      "epoch": 1.74512987012987,
      "grad_norm": 0.31655260920524597,
      "learning_rate": 7.620711877174866e-05,
      "loss": 0.0354,
      "step": 2150
    },
    {
      "epoch": 1.7532467532467533,
      "grad_norm": 0.35904660820961,
      "learning_rate": 7.597203097914732e-05,
      "loss": 0.0295,
      "step": 2160
    },
    {
      "epoch": 1.7613636363636362,
      "grad_norm": 0.5082493424415588,
      "learning_rate": 7.573615422679726e-05,
      "loss": 0.0318,
      "step": 2170
    },
    {
      "epoch": 1.7694805194805194,
      "grad_norm": 0.2941649556159973,
      "learning_rate": 7.549949567999345e-05,
      "loss": 0.0299,
      "step": 2180
    },
    {
      "epoch": 1.7775974025974026,
      "grad_norm": 0.4067605435848236,
      "learning_rate": 7.526206252777968e-05,
      "loss": 0.0323,
      "step": 2190
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 0.4252224564552307,
      "learning_rate": 7.50238619827301e-05,
      "loss": 0.0367,
      "step": 2200
    },
    {
      "epoch": 1.7938311688311688,
      "grad_norm": 0.4501606225967407,
      "learning_rate": 7.478490128073022e-05,
      "loss": 0.0311,
      "step": 2210
    },
    {
      "epoch": 1.801948051948052,
      "grad_norm": 0.4356978237628937,
      "learning_rate": 7.454518768075704e-05,
      "loss": 0.032,
      "step": 2220
    },
    {
      "epoch": 1.810064935064935,
      "grad_norm": 0.2771468460559845,
      "learning_rate": 7.430472846465856e-05,
      "loss": 0.0365,
      "step": 2230
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.4239960312843323,
      "learning_rate": 7.406353093693253e-05,
      "loss": 0.0332,
      "step": 2240
    },
    {
      "epoch": 1.8262987012987013,
      "grad_norm": 0.2908805012702942,
      "learning_rate": 7.382160242450469e-05,
      "loss": 0.0355,
      "step": 2250
    },
    {
      "epoch": 1.8344155844155843,
      "grad_norm": 0.36092737317085266,
      "learning_rate": 7.357895027650598e-05,
      "loss": 0.0335,
      "step": 2260
    },
    {
      "epoch": 1.8425324675324677,
      "grad_norm": 0.3536132276058197,
      "learning_rate": 7.333558186404958e-05,
      "loss": 0.0305,
      "step": 2270
    },
    {
      "epoch": 1.8506493506493507,
      "grad_norm": 0.41365113854408264,
      "learning_rate": 7.309150458000668e-05,
      "loss": 0.0354,
      "step": 2280
    },
    {
      "epoch": 1.8587662337662336,
      "grad_norm": 0.37331002950668335,
      "learning_rate": 7.284672583878219e-05,
      "loss": 0.0368,
      "step": 2290
    },
    {
      "epoch": 1.866883116883117,
      "grad_norm": 0.4106990098953247,
      "learning_rate": 7.260125307608929e-05,
      "loss": 0.0271,
      "step": 2300
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.46040254831314087,
      "learning_rate": 7.235509374872373e-05,
      "loss": 0.037,
      "step": 2310
    },
    {
      "epoch": 1.883116883116883,
      "grad_norm": 0.25721243023872375,
      "learning_rate": 7.210825533433719e-05,
      "loss": 0.0284,
      "step": 2320
    },
    {
      "epoch": 1.8912337662337664,
      "grad_norm": 0.37134552001953125,
      "learning_rate": 7.186074533121013e-05,
      "loss": 0.0396,
      "step": 2330
    },
    {
      "epoch": 1.8993506493506493,
      "grad_norm": 0.36696702241897583,
      "learning_rate": 7.161257125802413e-05,
      "loss": 0.0313,
      "step": 2340
    },
    {
      "epoch": 1.9074675324675323,
      "grad_norm": 0.3280735909938812,
      "learning_rate": 7.136374065363334e-05,
      "loss": 0.0356,
      "step": 2350
    },
    {
      "epoch": 1.9155844155844157,
      "grad_norm": 0.36613646149635315,
      "learning_rate": 7.11142610768356e-05,
      "loss": 0.0329,
      "step": 2360
    },
    {
      "epoch": 1.9237012987012987,
      "grad_norm": 0.5371996164321899,
      "learning_rate": 7.086414010614276e-05,
      "loss": 0.0311,
      "step": 2370
    },
    {
      "epoch": 1.9318181818181817,
      "grad_norm": 0.34445852041244507,
      "learning_rate": 7.061338533955043e-05,
      "loss": 0.0281,
      "step": 2380
    },
    {
      "epoch": 1.939935064935065,
      "grad_norm": 0.4094446003437042,
      "learning_rate": 7.036200439430725e-05,
      "loss": 0.0347,
      "step": 2390
    },
    {
      "epoch": 1.948051948051948,
      "grad_norm": 0.3943575620651245,
      "learning_rate": 7.01100049066835e-05,
      "loss": 0.0339,
      "step": 2400
    },
    {
      "epoch": 1.9561688311688312,
      "grad_norm": 0.427166223526001,
      "learning_rate": 6.985739453173903e-05,
      "loss": 0.0316,
      "step": 2410
    },
    {
      "epoch": 1.9642857142857144,
      "grad_norm": 0.21290753781795502,
      "learning_rate": 6.960418094309085e-05,
      "loss": 0.0351,
      "step": 2420
    },
    {
      "epoch": 1.9724025974025974,
      "grad_norm": 0.32666370272636414,
      "learning_rate": 6.93503718326799e-05,
      "loss": 0.0294,
      "step": 2430
    },
    {
      "epoch": 1.9805194805194806,
      "grad_norm": 0.3321411907672882,
      "learning_rate": 6.909597491053751e-05,
      "loss": 0.0332,
      "step": 2440
    },
    {
      "epoch": 1.9886363636363638,
      "grad_norm": 0.33192306756973267,
      "learning_rate": 6.884099790455113e-05,
      "loss": 0.0301,
      "step": 2450
    },
    {
      "epoch": 1.9967532467532467,
      "grad_norm": 0.5505363941192627,
      "learning_rate": 6.858544856022952e-05,
      "loss": 0.0358,
      "step": 2460
    },
    {
      "epoch": 2.0048701298701297,
      "grad_norm": 0.5219383239746094,
      "learning_rate": 6.83293346404676e-05,
      "loss": 0.0346,
      "step": 2470
    },
    {
      "epoch": 2.012987012987013,
      "grad_norm": 0.34627780318260193,
      "learning_rate": 6.80726639253105e-05,
      "loss": 0.0377,
      "step": 2480
    },
    {
      "epoch": 2.021103896103896,
      "grad_norm": 0.4243893325328827,
      "learning_rate": 6.781544421171732e-05,
      "loss": 0.0301,
      "step": 2490
    },
    {
      "epoch": 2.029220779220779,
      "grad_norm": 0.38806095719337463,
      "learning_rate": 6.755768331332424e-05,
      "loss": 0.0304,
      "step": 2500
    },
    {
      "epoch": 2.0373376623376624,
      "grad_norm": 0.30858781933784485,
      "learning_rate": 6.729938906020713e-05,
      "loss": 0.032,
      "step": 2510
    },
    {
      "epoch": 2.0454545454545454,
      "grad_norm": 0.2511473298072815,
      "learning_rate": 6.704056929864376e-05,
      "loss": 0.0257,
      "step": 2520
    },
    {
      "epoch": 2.0535714285714284,
      "grad_norm": 0.28458356857299805,
      "learning_rate": 6.67812318908754e-05,
      "loss": 0.0294,
      "step": 2530
    },
    {
      "epoch": 2.061688311688312,
      "grad_norm": 0.3707312047481537,
      "learning_rate": 6.6521384714868e-05,
      "loss": 0.0314,
      "step": 2540
    },
    {
      "epoch": 2.0698051948051948,
      "grad_norm": 0.24003523588180542,
      "learning_rate": 6.626103566407295e-05,
      "loss": 0.0338,
      "step": 2550
    },
    {
      "epoch": 2.0779220779220777,
      "grad_norm": 0.3327799439430237,
      "learning_rate": 6.600019264718713e-05,
      "loss": 0.032,
      "step": 2560
    },
    {
      "epoch": 2.086038961038961,
      "grad_norm": 0.3021881878376007,
      "learning_rate": 6.573886358791285e-05,
      "loss": 0.0295,
      "step": 2570
    },
    {
      "epoch": 2.094155844155844,
      "grad_norm": 0.30725860595703125,
      "learning_rate": 6.547705642471703e-05,
      "loss": 0.0287,
      "step": 2580
    },
    {
      "epoch": 2.102272727272727,
      "grad_norm": 0.2985770106315613,
      "learning_rate": 6.521477911059008e-05,
      "loss": 0.0289,
      "step": 2590
    },
    {
      "epoch": 2.1103896103896105,
      "grad_norm": 0.2585412859916687,
      "learning_rate": 6.495203961280434e-05,
      "loss": 0.0264,
      "step": 2600
    },
    {
      "epoch": 2.1185064935064934,
      "grad_norm": 0.3345862627029419,
      "learning_rate": 6.468884591267204e-05,
      "loss": 0.0294,
      "step": 2610
    },
    {
      "epoch": 2.1266233766233764,
      "grad_norm": 0.41437289118766785,
      "learning_rate": 6.44252060053028e-05,
      "loss": 0.027,
      "step": 2620
    },
    {
      "epoch": 2.13474025974026,
      "grad_norm": 0.25786668062210083,
      "learning_rate": 6.416112789936086e-05,
      "loss": 0.0262,
      "step": 2630
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.2157658040523529,
      "learning_rate": 6.389661961682173e-05,
      "loss": 0.0244,
      "step": 2640
    },
    {
      "epoch": 2.150974025974026,
      "grad_norm": 0.3058822453022003,
      "learning_rate": 6.363168919272846e-05,
      "loss": 0.0258,
      "step": 2650
    },
    {
      "epoch": 2.159090909090909,
      "grad_norm": 0.27886053919792175,
      "learning_rate": 6.336634467494768e-05,
      "loss": 0.0363,
      "step": 2660
    },
    {
      "epoch": 2.167207792207792,
      "grad_norm": 0.3333021402359009,
      "learning_rate": 6.310059412392505e-05,
      "loss": 0.0291,
      "step": 2670
    },
    {
      "epoch": 2.175324675324675,
      "grad_norm": 0.2074045091867447,
      "learning_rate": 6.283444561244042e-05,
      "loss": 0.0315,
      "step": 2680
    },
    {
      "epoch": 2.1834415584415585,
      "grad_norm": 0.24044620990753174,
      "learning_rate": 6.256790722536251e-05,
      "loss": 0.0241,
      "step": 2690
    },
    {
      "epoch": 2.1915584415584415,
      "grad_norm": 0.3287203013896942,
      "learning_rate": 6.230098705940354e-05,
      "loss": 0.0321,
      "step": 2700
    },
    {
      "epoch": 2.199675324675325,
      "grad_norm": 0.43518200516700745,
      "learning_rate": 6.203369322287306e-05,
      "loss": 0.0283,
      "step": 2710
    },
    {
      "epoch": 2.207792207792208,
      "grad_norm": 0.3628130257129669,
      "learning_rate": 6.17660338354317e-05,
      "loss": 0.03,
      "step": 2720
    },
    {
      "epoch": 2.215909090909091,
      "grad_norm": 0.26702725887298584,
      "learning_rate": 6.149801702784456e-05,
      "loss": 0.028,
      "step": 2730
    },
    {
      "epoch": 2.224025974025974,
      "grad_norm": 0.297001451253891,
      "learning_rate": 6.122965094173424e-05,
      "loss": 0.0294,
      "step": 2740
    },
    {
      "epoch": 2.232142857142857,
      "grad_norm": 0.21826975047588348,
      "learning_rate": 6.0960943729333374e-05,
      "loss": 0.0225,
      "step": 2750
    },
    {
      "epoch": 2.24025974025974,
      "grad_norm": 0.2182624638080597,
      "learning_rate": 6.069190355323717e-05,
      "loss": 0.0276,
      "step": 2760
    },
    {
      "epoch": 2.2483766233766236,
      "grad_norm": 0.31292399764060974,
      "learning_rate": 6.042253858615532e-05,
      "loss": 0.0231,
      "step": 2770
    },
    {
      "epoch": 2.2564935064935066,
      "grad_norm": 0.2803086042404175,
      "learning_rate": 6.015285701066382e-05,
      "loss": 0.0255,
      "step": 2780
    },
    {
      "epoch": 2.2646103896103895,
      "grad_norm": 0.3240359425544739,
      "learning_rate": 5.988286701895631e-05,
      "loss": 0.0258,
      "step": 2790
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.5147755742073059,
      "learning_rate": 5.961257681259535e-05,
      "loss": 0.0296,
      "step": 2800
    },
    {
      "epoch": 2.280844155844156,
      "grad_norm": 0.3791831135749817,
      "learning_rate": 5.934199460226317e-05,
      "loss": 0.0298,
      "step": 2810
    },
    {
      "epoch": 2.288961038961039,
      "grad_norm": 0.3257302939891815,
      "learning_rate": 5.9071128607512285e-05,
      "loss": 0.0224,
      "step": 2820
    },
    {
      "epoch": 2.2970779220779223,
      "grad_norm": 0.30238914489746094,
      "learning_rate": 5.8799987056515804e-05,
      "loss": 0.0272,
      "step": 2830
    },
    {
      "epoch": 2.3051948051948052,
      "grad_norm": 0.31279265880584717,
      "learning_rate": 5.8528578185817514e-05,
      "loss": 0.0259,
      "step": 2840
    },
    {
      "epoch": 2.313311688311688,
      "grad_norm": 0.23632532358169556,
      "learning_rate": 5.825691024008162e-05,
      "loss": 0.0232,
      "step": 2850
    },
    {
      "epoch": 2.3214285714285716,
      "grad_norm": 0.4876987934112549,
      "learning_rate": 5.798499147184233e-05,
      "loss": 0.0302,
      "step": 2860
    },
    {
      "epoch": 2.3295454545454546,
      "grad_norm": 0.47043561935424805,
      "learning_rate": 5.771283014125317e-05,
      "loss": 0.0245,
      "step": 2870
    },
    {
      "epoch": 2.3376623376623376,
      "grad_norm": 0.2626848518848419,
      "learning_rate": 5.7440434515836064e-05,
      "loss": 0.0259,
      "step": 2880
    },
    {
      "epoch": 2.345779220779221,
      "grad_norm": 0.26216691732406616,
      "learning_rate": 5.7167812870230094e-05,
      "loss": 0.0241,
      "step": 2890
    },
    {
      "epoch": 2.353896103896104,
      "grad_norm": 0.3104308247566223,
      "learning_rate": 5.689497348594035e-05,
      "loss": 0.0274,
      "step": 2900
    },
    {
      "epoch": 2.362012987012987,
      "grad_norm": 0.32353267073631287,
      "learning_rate": 5.662192465108613e-05,
      "loss": 0.0346,
      "step": 2910
    },
    {
      "epoch": 2.3701298701298703,
      "grad_norm": 0.33068498969078064,
      "learning_rate": 5.634867466014932e-05,
      "loss": 0.024,
      "step": 2920
    },
    {
      "epoch": 2.3782467532467533,
      "grad_norm": 0.2560001015663147,
      "learning_rate": 5.607523181372234e-05,
      "loss": 0.0258,
      "step": 2930
    },
    {
      "epoch": 2.3863636363636362,
      "grad_norm": 0.3798504173755646,
      "learning_rate": 5.5801604418256117e-05,
      "loss": 0.0252,
      "step": 2940
    },
    {
      "epoch": 2.3944805194805197,
      "grad_norm": 0.28704068064689636,
      "learning_rate": 5.552780078580756e-05,
      "loss": 0.0249,
      "step": 2950
    },
    {
      "epoch": 2.4025974025974026,
      "grad_norm": 0.22857019305229187,
      "learning_rate": 5.525382923378728e-05,
      "loss": 0.0294,
      "step": 2960
    },
    {
      "epoch": 2.4107142857142856,
      "grad_norm": 0.27925705909729004,
      "learning_rate": 5.49796980847068e-05,
      "loss": 0.0245,
      "step": 2970
    },
    {
      "epoch": 2.418831168831169,
      "grad_norm": 0.259421706199646,
      "learning_rate": 5.470541566592573e-05,
      "loss": 0.0272,
      "step": 2980
    },
    {
      "epoch": 2.426948051948052,
      "grad_norm": 0.33721524477005005,
      "learning_rate": 5.443099030939887e-05,
      "loss": 0.0252,
      "step": 2990
    },
    {
      "epoch": 2.435064935064935,
      "grad_norm": 0.23205623030662537,
      "learning_rate": 5.415643035142309e-05,
      "loss": 0.0254,
      "step": 3000
    },
    {
      "epoch": 2.4431818181818183,
      "grad_norm": 0.21620187163352966,
      "learning_rate": 5.3881744132384104e-05,
      "loss": 0.0248,
      "step": 3010
    },
    {
      "epoch": 2.4512987012987013,
      "grad_norm": 0.3049747347831726,
      "learning_rate": 5.360693999650303e-05,
      "loss": 0.0301,
      "step": 3020
    },
    {
      "epoch": 2.4594155844155843,
      "grad_norm": 0.22451385855674744,
      "learning_rate": 5.3332026291583016e-05,
      "loss": 0.0234,
      "step": 3030
    },
    {
      "epoch": 2.4675324675324677,
      "grad_norm": 0.28510794043540955,
      "learning_rate": 5.305701136875566e-05,
      "loss": 0.024,
      "step": 3040
    },
    {
      "epoch": 2.4756493506493507,
      "grad_norm": 0.3264691233634949,
      "learning_rate": 5.278190358222721e-05,
      "loss": 0.0227,
      "step": 3050
    },
    {
      "epoch": 2.4837662337662336,
      "grad_norm": 0.3188452422618866,
      "learning_rate": 5.25067112890249e-05,
      "loss": 0.0305,
      "step": 3060
    },
    {
      "epoch": 2.491883116883117,
      "grad_norm": 0.33883917331695557,
      "learning_rate": 5.2231442848743064e-05,
      "loss": 0.0275,
      "step": 3070
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.3571816384792328,
      "learning_rate": 5.1956106623289145e-05,
      "loss": 0.0259,
      "step": 3080
    },
    {
      "epoch": 2.508116883116883,
      "grad_norm": 0.28985506296157837,
      "learning_rate": 5.168071097662972e-05,
      "loss": 0.0297,
      "step": 3090
    },
    {
      "epoch": 2.5162337662337664,
      "grad_norm": 0.3391208052635193,
      "learning_rate": 5.1405264274536445e-05,
      "loss": 0.0273,
      "step": 3100
    },
    {
      "epoch": 2.5243506493506493,
      "grad_norm": 0.32588908076286316,
      "learning_rate": 5.112977488433188e-05,
      "loss": 0.0308,
      "step": 3110
    },
    {
      "epoch": 2.5324675324675323,
      "grad_norm": 0.22015531361103058,
      "learning_rate": 5.085425117463533e-05,
      "loss": 0.0252,
      "step": 3120
    },
    {
      "epoch": 2.5405844155844157,
      "grad_norm": 0.32106679677963257,
      "learning_rate": 5.057870151510864e-05,
      "loss": 0.0275,
      "step": 3130
    },
    {
      "epoch": 2.5487012987012987,
      "grad_norm": 0.27428922057151794,
      "learning_rate": 5.030313427620197e-05,
      "loss": 0.0269,
      "step": 3140
    },
    {
      "epoch": 2.5568181818181817,
      "grad_norm": 0.2819305956363678,
      "learning_rate": 5.0027557828899426e-05,
      "loss": 0.0266,
      "step": 3150
    },
    {
      "epoch": 2.564935064935065,
      "grad_norm": 0.48730844259262085,
      "learning_rate": 4.975198054446492e-05,
      "loss": 0.0259,
      "step": 3160
    },
    {
      "epoch": 2.573051948051948,
      "grad_norm": 0.2712593078613281,
      "learning_rate": 4.947641079418773e-05,
      "loss": 0.0271,
      "step": 3170
    },
    {
      "epoch": 2.5811688311688314,
      "grad_norm": 0.26792702078819275,
      "learning_rate": 4.920085694912828e-05,
      "loss": 0.0256,
      "step": 3180
    },
    {
      "epoch": 2.5892857142857144,
      "grad_norm": 0.2116631269454956,
      "learning_rate": 4.892532737986387e-05,
      "loss": 0.0241,
      "step": 3190
    },
    {
      "epoch": 2.5974025974025974,
      "grad_norm": 0.20508752763271332,
      "learning_rate": 4.864983045623434e-05,
      "loss": 0.029,
      "step": 3200
    },
    {
      "epoch": 2.6055194805194803,
      "grad_norm": 0.3375774323940277,
      "learning_rate": 4.837437454708784e-05,
      "loss": 0.0257,
      "step": 3210
    },
    {
      "epoch": 2.6136363636363638,
      "grad_norm": 0.275234580039978,
      "learning_rate": 4.809896802002662e-05,
      "loss": 0.0228,
      "step": 3220
    },
    {
      "epoch": 2.6217532467532467,
      "grad_norm": 0.33019763231277466,
      "learning_rate": 4.7823619241152854e-05,
      "loss": 0.0243,
      "step": 3230
    },
    {
      "epoch": 2.62987012987013,
      "grad_norm": 0.28387683629989624,
      "learning_rate": 4.754833657481445e-05,
      "loss": 0.02,
      "step": 3240
    },
    {
      "epoch": 2.637987012987013,
      "grad_norm": 0.29118308424949646,
      "learning_rate": 4.7273128383351015e-05,
      "loss": 0.0213,
      "step": 3250
    },
    {
      "epoch": 2.646103896103896,
      "grad_norm": 0.2830529808998108,
      "learning_rate": 4.699800302683981e-05,
      "loss": 0.03,
      "step": 3260
    },
    {
      "epoch": 2.654220779220779,
      "grad_norm": 0.30629491806030273,
      "learning_rate": 4.6722968862841806e-05,
      "loss": 0.023,
      "step": 3270
    },
    {
      "epoch": 2.6623376623376624,
      "grad_norm": 0.22736108303070068,
      "learning_rate": 4.6448034246147754e-05,
      "loss": 0.0242,
      "step": 3280
    },
    {
      "epoch": 2.6704545454545454,
      "grad_norm": 0.30054470896720886,
      "learning_rate": 4.6173207528524476e-05,
      "loss": 0.0266,
      "step": 3290
    },
    {
      "epoch": 2.678571428571429,
      "grad_norm": 0.1740064173936844,
      "learning_rate": 4.58984970584611e-05,
      "loss": 0.0195,
      "step": 3300
    },
    {
      "epoch": 2.686688311688312,
      "grad_norm": 0.26478099822998047,
      "learning_rate": 4.562391118091544e-05,
      "loss": 0.0238,
      "step": 3310
    },
    {
      "epoch": 2.6948051948051948,
      "grad_norm": 0.25808119773864746,
      "learning_rate": 4.534945823706056e-05,
      "loss": 0.0214,
      "step": 3320
    },
    {
      "epoch": 2.7029220779220777,
      "grad_norm": 0.2761845588684082,
      "learning_rate": 4.507514656403137e-05,
      "loss": 0.0245,
      "step": 3330
    },
    {
      "epoch": 2.711038961038961,
      "grad_norm": 0.27152833342552185,
      "learning_rate": 4.480098449467132e-05,
      "loss": 0.0227,
      "step": 3340
    },
    {
      "epoch": 2.719155844155844,
      "grad_norm": 0.1654919534921646,
      "learning_rate": 4.452698035727929e-05,
      "loss": 0.023,
      "step": 3350
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.28704938292503357,
      "learning_rate": 4.425314247535668e-05,
      "loss": 0.0246,
      "step": 3360
    },
    {
      "epoch": 2.7353896103896105,
      "grad_norm": 0.2609517276287079,
      "learning_rate": 4.3979479167354477e-05,
      "loss": 0.0254,
      "step": 3370
    },
    {
      "epoch": 2.7435064935064934,
      "grad_norm": 0.16841895878314972,
      "learning_rate": 4.370599874642055e-05,
      "loss": 0.0225,
      "step": 3380
    },
    {
      "epoch": 2.7516233766233764,
      "grad_norm": 0.2550669014453888,
      "learning_rate": 4.3432709520147205e-05,
      "loss": 0.0227,
      "step": 3390
    },
    {
      "epoch": 2.75974025974026,
| "grad_norm": 0.29165688157081604, | |
| "learning_rate": 4.315961979031875e-05, | |
| "loss": 0.0276, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 2.767857142857143, | |
| "grad_norm": 0.2834565043449402, | |
| "learning_rate": 4.2886737852659325e-05, | |
| "loss": 0.0226, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 2.775974025974026, | |
| "grad_norm": 0.3727666139602661, | |
| "learning_rate": 4.261407199658093e-05, | |
| "loss": 0.0272, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 2.784090909090909, | |
| "grad_norm": 0.34494835138320923, | |
| "learning_rate": 4.234163050493158e-05, | |
| "loss": 0.0205, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 2.792207792207792, | |
| "grad_norm": 0.2529587745666504, | |
| "learning_rate": 4.2069421653743706e-05, | |
| "loss": 0.0208, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 2.800324675324675, | |
| "grad_norm": 0.3394658863544464, | |
| "learning_rate": 4.179745371198276e-05, | |
| "loss": 0.0262, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 2.8084415584415585, | |
| "grad_norm": 0.22549283504486084, | |
| "learning_rate": 4.1525734941296026e-05, | |
| "loss": 0.0261, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 2.8165584415584415, | |
| "grad_norm": 0.2669041156768799, | |
| "learning_rate": 4.125427359576162e-05, | |
| "loss": 0.0212, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 2.824675324675325, | |
| "grad_norm": 0.26632052659988403, | |
| "learning_rate": 4.0983077921637815e-05, | |
| "loss": 0.0228, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 2.832792207792208, | |
| "grad_norm": 0.20987533032894135, | |
| "learning_rate": 4.07121561571125e-05, | |
| "loss": 0.0262, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 2.840909090909091, | |
| "grad_norm": 0.42086461186408997, | |
| "learning_rate": 4.044151653205292e-05, | |
| "loss": 0.0265, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 2.849025974025974, | |
| "grad_norm": 0.23293422162532806, | |
| "learning_rate": 4.0171167267755696e-05, | |
| "loss": 0.0273, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 2.857142857142857, | |
| "grad_norm": 0.21170319616794586, | |
| "learning_rate": 3.9901116576697083e-05, | |
| "loss": 0.0225, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 2.86525974025974, | |
| "grad_norm": 0.26338985562324524, | |
| "learning_rate": 3.963137266228349e-05, | |
| "loss": 0.0188, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 2.8733766233766236, | |
| "grad_norm": 0.2753160297870636, | |
| "learning_rate": 3.93619437186023e-05, | |
| "loss": 0.0214, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 2.8814935064935066, | |
| "grad_norm": 0.2192564606666565, | |
| "learning_rate": 3.9092837930172884e-05, | |
| "loss": 0.0258, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 2.8896103896103895, | |
| "grad_norm": 0.1939588338136673, | |
| "learning_rate": 3.8824063471698105e-05, | |
| "loss": 0.0215, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 2.8977272727272725, | |
| "grad_norm": 0.3316612243652344, | |
| "learning_rate": 3.855562850781589e-05, | |
| "loss": 0.028, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 2.905844155844156, | |
| "grad_norm": 0.35597196221351624, | |
| "learning_rate": 3.828754119285123e-05, | |
| "loss": 0.0247, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.913961038961039, | |
| "grad_norm": 0.17957353591918945, | |
| "learning_rate": 3.801980967056851e-05, | |
| "loss": 0.0243, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.9220779220779223, | |
| "grad_norm": 0.2684866189956665, | |
| "learning_rate": 3.77524420739241e-05, | |
| "loss": 0.024, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.9301948051948052, | |
| "grad_norm": 0.27119967341423035, | |
| "learning_rate": 3.748544652481927e-05, | |
| "loss": 0.0223, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.938311688311688, | |
| "grad_norm": 0.2573845684528351, | |
| "learning_rate": 3.721883113385353e-05, | |
| "loss": 0.0224, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.946428571428571, | |
| "grad_norm": 0.22625279426574707, | |
| "learning_rate": 3.695260400007819e-05, | |
| "loss": 0.025, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.9545454545454546, | |
| "grad_norm": 0.254618376493454, | |
| "learning_rate": 3.6686773210750385e-05, | |
| "loss": 0.0242, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.9626623376623376, | |
| "grad_norm": 0.2085765153169632, | |
| "learning_rate": 3.642134684108737e-05, | |
| "loss": 0.0216, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.970779220779221, | |
| "grad_norm": 0.27070721983909607, | |
| "learning_rate": 3.615633295402123e-05, | |
| "loss": 0.0215, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.978896103896104, | |
| "grad_norm": 0.43438518047332764, | |
| "learning_rate": 3.5891739599953945e-05, | |
| "loss": 0.0232, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.987012987012987, | |
| "grad_norm": 0.2724141776561737, | |
| "learning_rate": 3.5627574816512846e-05, | |
| "loss": 0.022, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.99512987012987, | |
| "grad_norm": 0.2144225537776947, | |
| "learning_rate": 3.536384662830648e-05, | |
| "loss": 0.0222, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 3.0032467532467533, | |
| "grad_norm": 0.3140086531639099, | |
| "learning_rate": 3.5100563046680764e-05, | |
| "loss": 0.0274, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 3.0113636363636362, | |
| "grad_norm": 0.23359528183937073, | |
| "learning_rate": 3.483773206947572e-05, | |
| "loss": 0.0267, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 3.0194805194805197, | |
| "grad_norm": 0.26310062408447266, | |
| "learning_rate": 3.457536168078247e-05, | |
| "loss": 0.0266, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 3.0275974025974026, | |
| "grad_norm": 0.29639682173728943, | |
| "learning_rate": 3.431345985070067e-05, | |
| "loss": 0.0204, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 3.0357142857142856, | |
| "grad_norm": 0.2499479353427887, | |
| "learning_rate": 3.40520345350965e-05, | |
| "loss": 0.0226, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 3.043831168831169, | |
| "grad_norm": 0.2562311887741089, | |
| "learning_rate": 3.379109367536089e-05, | |
| "loss": 0.0217, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 3.051948051948052, | |
| "grad_norm": 0.27222245931625366, | |
| "learning_rate": 3.3530645198168295e-05, | |
| "loss": 0.0233, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 3.060064935064935, | |
| "grad_norm": 0.37355130910873413, | |
| "learning_rate": 3.327069701523595e-05, | |
| "loss": 0.0299, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 3.0681818181818183, | |
| "grad_norm": 0.2403850555419922, | |
| "learning_rate": 3.301125702308353e-05, | |
| "loss": 0.0222, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 3.0762987012987013, | |
| "grad_norm": 0.1968202441930771, | |
| "learning_rate": 3.275233310279321e-05, | |
| "loss": 0.0203, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 3.0844155844155843, | |
| "grad_norm": 0.23642730712890625, | |
| "learning_rate": 3.249393311977037e-05, | |
| "loss": 0.0229, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 3.0925324675324677, | |
| "grad_norm": 0.19872798025608063, | |
| "learning_rate": 3.223606492350451e-05, | |
| "loss": 0.0208, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 3.1006493506493507, | |
| "grad_norm": 0.20454058051109314, | |
| "learning_rate": 3.197873634733096e-05, | |
| "loss": 0.0242, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 3.1087662337662336, | |
| "grad_norm": 0.31078484654426575, | |
| "learning_rate": 3.172195520819285e-05, | |
| "loss": 0.0236, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 3.116883116883117, | |
| "grad_norm": 0.3581089377403259, | |
| "learning_rate": 3.146572930640362e-05, | |
| "loss": 0.0216, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 3.125, | |
| "grad_norm": 0.2517269551753998, | |
| "learning_rate": 3.121006642541014e-05, | |
| "loss": 0.0183, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 3.133116883116883, | |
| "grad_norm": 0.26109376549720764, | |
| "learning_rate": 3.095497433155626e-05, | |
| "loss": 0.0242, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 3.1412337662337664, | |
| "grad_norm": 0.20480413734912872, | |
| "learning_rate": 3.070046077384682e-05, | |
| "loss": 0.0202, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 3.1493506493506493, | |
| "grad_norm": 0.2771162986755371, | |
| "learning_rate": 3.0446533483712304e-05, | |
| "loss": 0.0231, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 3.1574675324675323, | |
| "grad_norm": 0.16268500685691833, | |
| "learning_rate": 3.0193200174774038e-05, | |
| "loss": 0.0173, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 3.1655844155844157, | |
| "grad_norm": 0.2741657793521881, | |
| "learning_rate": 2.994046854260974e-05, | |
| "loss": 0.0241, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 3.1737012987012987, | |
| "grad_norm": 0.38963595032691956, | |
| "learning_rate": 2.9688346264519866e-05, | |
| "loss": 0.019, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 3.1818181818181817, | |
| "grad_norm": 0.36379268765449524, | |
| "learning_rate": 2.943684099929436e-05, | |
| "loss": 0.0218, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 3.189935064935065, | |
| "grad_norm": 0.34124332666397095, | |
| "learning_rate": 2.918596038697995e-05, | |
| "loss": 0.0185, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 3.198051948051948, | |
| "grad_norm": 0.25402340292930603, | |
| "learning_rate": 2.8935712048648112e-05, | |
| "loss": 0.0204, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 3.206168831168831, | |
| "grad_norm": 0.2678219676017761, | |
| "learning_rate": 2.8686103586163626e-05, | |
| "loss": 0.0212, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 3.2142857142857144, | |
| "grad_norm": 0.2375226765871048, | |
| "learning_rate": 2.843714258195346e-05, | |
| "loss": 0.0211, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 3.2224025974025974, | |
| "grad_norm": 0.22339288890361786, | |
| "learning_rate": 2.8188836598776662e-05, | |
| "loss": 0.0194, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 3.2305194805194803, | |
| "grad_norm": 0.25037524104118347, | |
| "learning_rate": 2.7941193179494484e-05, | |
| "loss": 0.0205, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 3.2386363636363638, | |
| "grad_norm": 0.19681259989738464, | |
| "learning_rate": 2.7694219846841262e-05, | |
| "loss": 0.0197, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 3.2467532467532467, | |
| "grad_norm": 0.2873547375202179, | |
| "learning_rate": 2.7447924103195976e-05, | |
| "loss": 0.0212, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 3.2548701298701297, | |
| "grad_norm": 0.33498382568359375, | |
| "learning_rate": 2.7202313430354253e-05, | |
| "loss": 0.0202, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 3.262987012987013, | |
| "grad_norm": 0.2162805050611496, | |
| "learning_rate": 2.695739528930111e-05, | |
| "loss": 0.0179, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 3.271103896103896, | |
| "grad_norm": 0.25468140840530396, | |
| "learning_rate": 2.67131771199844e-05, | |
| "loss": 0.02, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 3.279220779220779, | |
| "grad_norm": 0.21498627960681915, | |
| "learning_rate": 2.6469666341088677e-05, | |
| "loss": 0.0243, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 3.2873376623376624, | |
| "grad_norm": 0.3046325445175171, | |
| "learning_rate": 2.6226870349809885e-05, | |
| "loss": 0.0197, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 3.2954545454545454, | |
| "grad_norm": 0.2851937413215637, | |
| "learning_rate": 2.5984796521630737e-05, | |
| "loss": 0.0233, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 3.3035714285714284, | |
| "grad_norm": 0.2599593997001648, | |
| "learning_rate": 2.574345221009653e-05, | |
| "loss": 0.0207, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 3.311688311688312, | |
| "grad_norm": 0.19292496144771576, | |
| "learning_rate": 2.5502844746591804e-05, | |
| "loss": 0.0184, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 3.3198051948051948, | |
| "grad_norm": 0.2586632966995239, | |
| "learning_rate": 2.526298144011775e-05, | |
| "loss": 0.0184, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 3.3279220779220777, | |
| "grad_norm": 0.2998685836791992, | |
| "learning_rate": 2.5023869577070013e-05, | |
| "loss": 0.0225, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 3.336038961038961, | |
| "grad_norm": 0.3807864189147949, | |
| "learning_rate": 2.478551642101743e-05, | |
| "loss": 0.0193, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 3.344155844155844, | |
| "grad_norm": 0.2566285729408264, | |
| "learning_rate": 2.4547929212481435e-05, | |
| "loss": 0.0191, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 3.3522727272727275, | |
| "grad_norm": 0.17539237439632416, | |
| "learning_rate": 2.4311115168716013e-05, | |
| "loss": 0.0208, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 3.3603896103896105, | |
| "grad_norm": 0.18529631197452545, | |
| "learning_rate": 2.4075081483488494e-05, | |
| "loss": 0.0191, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 3.3685064935064934, | |
| "grad_norm": 0.19923549890518188, | |
| "learning_rate": 2.3839835326861104e-05, | |
| "loss": 0.0213, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 3.3766233766233764, | |
| "grad_norm": 0.2135431319475174, | |
| "learning_rate": 2.3605383844972966e-05, | |
| "loss": 0.0129, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 3.38474025974026, | |
| "grad_norm": 0.3140435814857483, | |
| "learning_rate": 2.3371734159823284e-05, | |
| "loss": 0.0233, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 3.392857142857143, | |
| "grad_norm": 0.18021227419376373, | |
| "learning_rate": 2.3138893369054766e-05, | |
| "loss": 0.0186, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 3.400974025974026, | |
| "grad_norm": 0.2429431974887848, | |
| "learning_rate": 2.2906868545738102e-05, | |
| "loss": 0.0241, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 3.409090909090909, | |
| "grad_norm": 0.15611667931079865, | |
| "learning_rate": 2.2675666738157186e-05, | |
| "loss": 0.0177, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 3.417207792207792, | |
| "grad_norm": 0.30634909868240356, | |
| "learning_rate": 2.2445294969594844e-05, | |
| "loss": 0.0154, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 3.425324675324675, | |
| "grad_norm": 0.23167884349822998, | |
| "learning_rate": 2.22157602381196e-05, | |
| "loss": 0.015, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 3.4334415584415585, | |
| "grad_norm": 0.325353741645813, | |
| "learning_rate": 2.1987069516373098e-05, | |
| "loss": 0.0144, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 3.4415584415584415, | |
| "grad_norm": 0.21330159902572632, | |
| "learning_rate": 2.1759229751358217e-05, | |
| "loss": 0.0183, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 3.449675324675325, | |
| "grad_norm": 0.3478533625602722, | |
| "learning_rate": 2.1532247864228084e-05, | |
| "loss": 0.02, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 3.457792207792208, | |
| "grad_norm": 0.1727161556482315, | |
| "learning_rate": 2.1306130750075865e-05, | |
| "loss": 0.0249, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 3.465909090909091, | |
| "grad_norm": 0.19407041370868683, | |
| "learning_rate": 2.1080885277725236e-05, | |
| "loss": 0.0174, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 3.474025974025974, | |
| "grad_norm": 0.35378298163414, | |
| "learning_rate": 2.085651828952175e-05, | |
| "loss": 0.0193, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 3.482142857142857, | |
| "grad_norm": 0.2549465000629425, | |
| "learning_rate": 2.063303660112506e-05, | |
| "loss": 0.0163, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 3.49025974025974, | |
| "grad_norm": 0.2552514970302582, | |
| "learning_rate": 2.0410447001301753e-05, | |
| "loss": 0.0171, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 3.4983766233766236, | |
| "grad_norm": 0.2298896312713623, | |
| "learning_rate": 2.0188756251719203e-05, | |
| "loss": 0.0167, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 3.5064935064935066, | |
| "grad_norm": 0.23686964809894562, | |
| "learning_rate": 1.9967971086740195e-05, | |
| "loss": 0.0193, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 3.5146103896103895, | |
| "grad_norm": 0.2347656786441803, | |
| "learning_rate": 1.974809821321827e-05, | |
| "loss": 0.016, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 3.5227272727272725, | |
| "grad_norm": 0.2654639780521393, | |
| "learning_rate": 1.9529144310294023e-05, | |
| "loss": 0.0152, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 3.530844155844156, | |
| "grad_norm": 0.1813756227493286, | |
| "learning_rate": 1.9311116029192278e-05, | |
| "loss": 0.0171, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 3.538961038961039, | |
| "grad_norm": 0.2936214804649353, | |
| "learning_rate": 1.909401999301993e-05, | |
| "loss": 0.0162, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 3.5470779220779223, | |
| "grad_norm": 0.21046514809131622, | |
| "learning_rate": 1.887786279656482e-05, | |
| "loss": 0.0166, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 3.5551948051948052, | |
| "grad_norm": 0.3618803918361664, | |
| "learning_rate": 1.8662651006095387e-05, | |
| "loss": 0.0156, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 3.563311688311688, | |
| "grad_norm": 0.23544606566429138, | |
| "learning_rate": 1.8448391159161204e-05, | |
| "loss": 0.0183, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 3.571428571428571, | |
| "grad_norm": 0.28176313638687134, | |
| "learning_rate": 1.8235089764394408e-05, | |
| "loss": 0.023, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 3.5795454545454546, | |
| "grad_norm": 0.26535889506340027, | |
| "learning_rate": 1.8022753301311935e-05, | |
| "loss": 0.0213, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 3.5876623376623376, | |
| "grad_norm": 0.25943446159362793, | |
| "learning_rate": 1.7811388220118707e-05, | |
| "loss": 0.0175, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 3.595779220779221, | |
| "grad_norm": 0.15602117776870728, | |
| "learning_rate": 1.7601000941511757e-05, | |
| "loss": 0.0173, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 3.603896103896104, | |
| "grad_norm": 0.2504715919494629, | |
| "learning_rate": 1.7391597856485083e-05, | |
| "loss": 0.023, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 3.612012987012987, | |
| "grad_norm": 0.1567242443561554, | |
| "learning_rate": 1.7183185326135543e-05, | |
| "loss": 0.0167, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 3.62012987012987, | |
| "grad_norm": 0.268656462430954, | |
| "learning_rate": 1.6975769681469705e-05, | |
| "loss": 0.018, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 3.6282467532467533, | |
| "grad_norm": 0.37558749318122864, | |
| "learning_rate": 1.676935722321139e-05, | |
| "loss": 0.0174, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 3.6363636363636362, | |
| "grad_norm": 0.1570769101381302, | |
| "learning_rate": 1.6563954221610355e-05, | |
| "loss": 0.0175, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 3.6444805194805197, | |
| "grad_norm": 0.19073347747325897, | |
| "learning_rate": 1.6359566916251845e-05, | |
| "loss": 0.018, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 3.6525974025974026, | |
| "grad_norm": 0.17310558259487152, | |
| "learning_rate": 1.615620151586697e-05, | |
| "loss": 0.0168, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 3.6607142857142856, | |
| "grad_norm": 0.31775960326194763, | |
| "learning_rate": 1.5953864198144135e-05, | |
| "loss": 0.02, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 3.6688311688311686, | |
| "grad_norm": 0.1850634217262268, | |
| "learning_rate": 1.5752561109541447e-05, | |
| "loss": 0.0195, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 3.676948051948052, | |
| "grad_norm": 0.21364572644233704, | |
| "learning_rate": 1.5552298365099882e-05, | |
| "loss": 0.017, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 3.685064935064935, | |
| "grad_norm": 0.2515982687473297, | |
| "learning_rate": 1.5353082048257596e-05, | |
| "loss": 0.0148, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 3.6931818181818183, | |
| "grad_norm": 0.21852737665176392, | |
| "learning_rate": 1.5154918210665148e-05, | |
| "loss": 0.0166, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 3.7012987012987013, | |
| "grad_norm": 0.1786801964044571, | |
| "learning_rate": 1.4957812872001614e-05, | |
| "loss": 0.0138, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 3.7094155844155843, | |
| "grad_norm": 0.14969338476657867, | |
| "learning_rate": 1.4761772019791748e-05, | |
| "loss": 0.0181, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 3.7175324675324677, | |
| "grad_norm": 0.21216048300266266, | |
| "learning_rate": 1.4566801609224096e-05, | |
| "loss": 0.017, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 3.7256493506493507, | |
| "grad_norm": 0.16176529228687286, | |
| "learning_rate": 1.4372907562970079e-05, | |
| "loss": 0.016, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 3.7337662337662336, | |
| "grad_norm": 0.25220224261283875, | |
| "learning_rate": 1.4180095771004154e-05, | |
| "loss": 0.0148, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 3.741883116883117, | |
| "grad_norm": 0.1625714898109436, | |
| "learning_rate": 1.3988372090424773e-05, | |
| "loss": 0.0192, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.26604965329170227, | |
| "learning_rate": 1.3797742345276521e-05, | |
| "loss": 0.017, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 3.758116883116883, | |
| "grad_norm": 0.27260708808898926, | |
| "learning_rate": 1.3608212326373249e-05, | |
| "loss": 0.0176, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 3.7662337662337664, | |
| "grad_norm": 0.15837593376636505, | |
| "learning_rate": 1.3419787791122062e-05, | |
| "loss": 0.0142, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 3.7743506493506493, | |
| "grad_norm": 0.18480677902698517, | |
| "learning_rate": 1.323247446334847e-05, | |
| "loss": 0.019, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 3.7824675324675323, | |
| "grad_norm": 0.2447320967912674, | |
| "learning_rate": 1.3046278033122577e-05, | |
| "loss": 0.0244, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 3.7905844155844157, | |
| "grad_norm": 0.22955277562141418, | |
| "learning_rate": 1.286120415658611e-05, | |
| "loss": 0.0196, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 3.7987012987012987, | |
| "grad_norm": 0.15361738204956055, | |
| "learning_rate": 1.2677258455780683e-05, | |
| "loss": 0.0152, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 3.8068181818181817, | |
| "grad_norm": 0.2417902648448944, | |
| "learning_rate": 1.2494446518477022e-05, | |
| "loss": 0.0164, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 3.814935064935065, | |
| "grad_norm": 0.18717902898788452, | |
| "learning_rate": 1.2312773898005175e-05, | |
| "loss": 0.0178, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 3.823051948051948, | |
| "grad_norm": 0.20804652571678162, | |
| "learning_rate": 1.2132246113085822e-05, | |
| "loss": 0.0171, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 3.8311688311688314, | |
| "grad_norm": 0.2959846258163452, | |
| "learning_rate": 1.1952868647662696e-05, | |
| "loss": 0.0161, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 3.8392857142857144, | |
| "grad_norm": 0.15556474030017853, | |
| "learning_rate": 1.1774646950735913e-05, | |
| "loss": 0.0164, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 3.8474025974025974, | |
| "grad_norm": 0.24023498594760895, | |
| "learning_rate": 1.1597586436196473e-05, | |
| "loss": 0.0136, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 3.8555194805194803, | |
| "grad_norm": 0.2286766916513443, | |
| "learning_rate": 1.1421692482661856e-05, | |
| "loss": 0.015, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 3.8636363636363638, | |
| "grad_norm": 0.191964790225029, | |
| "learning_rate": 1.124697043331256e-05, | |
| "loss": 0.0157, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 3.8717532467532467, | |
| "grad_norm": 0.17993806302547455, | |
| "learning_rate": 1.107342559572977e-05, | |
| "loss": 0.0187, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 3.87987012987013, | |
| "grad_norm": 0.20845425128936768, | |
| "learning_rate": 1.090106324173426e-05, | |
| "loss": 0.0194, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 3.887987012987013, | |
| "grad_norm": 0.20088578760623932, | |
| "learning_rate": 1.0729888607226113e-05, | |
| "loss": 0.0146, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 3.896103896103896, | |
| "grad_norm": 0.24833039939403534, | |
| "learning_rate": 1.0559906892025745e-05, | |
| "loss": 0.0177, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 3.904220779220779, | |
| "grad_norm": 0.18850843608379364, | |
| "learning_rate": 1.0391123259715906e-05, | |
| "loss": 0.0142, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 3.9123376623376624, | |
| "grad_norm": 0.1293126493692398, | |
| "learning_rate": 1.0223542837484839e-05, | |
| "loss": 0.012, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 3.9204545454545454, | |
| "grad_norm": 0.18274357914924622, | |
| "learning_rate": 1.0057170715970559e-05, | |
| "loss": 0.0162, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 3.928571428571429, | |
| "grad_norm": 0.1782849133014679, | |
| "learning_rate": 9.892011949106172e-06, | |
| "loss": 0.0295, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 3.936688311688312, | |
| "grad_norm": 0.40481868386268616, | |
| "learning_rate": 9.728071553966339e-06, | |
| "loss": 0.0174, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 3.9448051948051948, | |
| "grad_norm": 0.20304250717163086, | |
| "learning_rate": 9.56535451061496e-06, | |
| "loss": 0.0138, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 3.9529220779220777, | |
| "grad_norm": 0.22350141406059265, | |
| "learning_rate": 9.403865761953779e-06, | |
| "loss": 0.0212, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 3.961038961038961, | |
| "grad_norm": 0.28146418929100037, | |
| "learning_rate": 9.243610213572285e-06, | |
| "loss": 0.0194, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 3.969155844155844, | |
| "grad_norm": 0.2405545562505722, | |
| "learning_rate": 9.084592733598735e-06, | |
| "loss": 0.0129, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 3.9772727272727275, | |
| "grad_norm": 0.27352192997932434, | |
| "learning_rate": 8.92681815255219e-06, | |
| "loss": 0.0174, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 3.9853896103896105, | |
| "grad_norm": 0.1327618509531021, | |
| "learning_rate": 8.770291263195819e-06, | |
| "loss": 0.0176, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 3.9935064935064934, | |
| "grad_norm": 0.1761268973350525, | |
| "learning_rate": 8.615016820391342e-06, | |
| "loss": 0.0119, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 4.001623376623376, | |
| "grad_norm": 0.35601338744163513, | |
| "learning_rate": 8.460999540954517e-06, | |
| "loss": 0.0158, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 4.009740259740259, | |
| "grad_norm": 0.17832595109939575, | |
| "learning_rate": 8.308244103511909e-06, | |
| "loss": 0.015, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 4.017857142857143, | |
| "grad_norm": 0.15967492759227753, | |
| "learning_rate": 8.156755148358764e-06, | |
| "loss": 0.0156, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 4.025974025974026, | |
| "grad_norm": 0.17017978429794312, | |
| "learning_rate": 8.00653727731801e-06, | |
| "loss": 0.0147, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 4.034090909090909, | |
| "grad_norm": 0.1537715643644333, | |
| "learning_rate": 7.857595053600513e-06, | |
| "loss": 0.0185, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 4.042207792207792, | |
| "grad_norm": 0.2602587938308716, | |
| "learning_rate": 7.709933001666431e-06, | |
| "loss": 0.021, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 4.050324675324675, | |
| "grad_norm": 0.2224591225385666, | |
| "learning_rate": 7.56355560708778e-06, | |
| "loss": 0.0131, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 4.058441558441558, | |
| "grad_norm": 0.3965739905834198, | |
| "learning_rate": 7.418467316412158e-06, | |
| "loss": 0.0173, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 4.066558441558442, | |
| "grad_norm": 0.20591789484024048, | |
| "learning_rate": 7.2746725370277435e-06, | |
| "loss": 0.0154, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 4.074675324675325, | |
| "grad_norm": 0.19456496834754944, | |
| "learning_rate": 7.132175637029293e-06, | |
| "loss": 0.017, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 4.082792207792208, | |
| "grad_norm": 0.226767435669899, | |
| "learning_rate": 6.9909809450855345e-06, | |
| "loss": 0.0169, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 4.090909090909091, | |
| "grad_norm": 0.2061874121427536, | |
| "learning_rate": 6.851092750307686e-06, | |
| "loss": 0.0153, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 4.099025974025974, | |
| "grad_norm": 0.16970008611679077, | |
| "learning_rate": 6.712515302119077e-06, | |
| "loss": 0.0149, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 4.107142857142857, | |
| "grad_norm": 0.16201592981815338, | |
| "learning_rate": 6.575252810126143e-06, | |
| "loss": 0.0125, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 4.115259740259741, | |
| "grad_norm": 0.2043023258447647, | |
| "learning_rate": 6.439309443990532e-06, | |
| "loss": 0.0145, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 4.123376623376624, | |
| "grad_norm": 0.19296449422836304, | |
| "learning_rate": 6.304689333302416e-06, | |
| "loss": 0.0146, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 4.1314935064935066, | |
| "grad_norm": 0.22057557106018066, | |
| "learning_rate": 6.171396567455051e-06, | |
| "loss": 0.0159, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 4.1396103896103895, | |
| "grad_norm": 0.15243513882160187, | |
| "learning_rate": 6.039435195520604e-06, | |
| "loss": 0.0179, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 4.1477272727272725, | |
| "grad_norm": 0.11597718298435211, | |
| "learning_rate": 5.908809226127054e-06, | |
| "loss": 0.0135, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 4.1558441558441555, | |
| "grad_norm": 0.35636165738105774, | |
| "learning_rate": 5.779522627336537e-06, | |
| "loss": 0.0182, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 4.163961038961039, | |
| "grad_norm": 0.2736373841762543, | |
| "learning_rate": 5.651579326524709e-06, | |
| "loss": 0.0139, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 4.172077922077922, | |
| "grad_norm": 0.15613017976284027, | |
| "learning_rate": 5.524983210261481e-06, | |
| "loss": 0.0139, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 4.180194805194805, | |
| "grad_norm": 0.26912152767181396, | |
| "learning_rate": 5.399738124192988e-06, | |
| "loss": 0.0146, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 4.188311688311688, | |
| "grad_norm": 0.22851672768592834, | |
| "learning_rate": 5.2758478729247164e-06, | |
| "loss": 0.0146, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 4.196428571428571, | |
| "grad_norm": 0.1734984815120697, | |
| "learning_rate": 5.153316219905946e-06, | |
| "loss": 0.0119, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 4.204545454545454, | |
| "grad_norm": 0.12929925322532654, | |
| "learning_rate": 5.032146887315448e-06, | |
| "loss": 0.0117, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 4.212662337662338, | |
| "grad_norm": 0.25070181488990784, | |
| "learning_rate": 4.91234355594839e-06, | |
| "loss": 0.016, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 4.220779220779221, | |
| "grad_norm": 0.12695631384849548, | |
| "learning_rate": 4.7939098651045235e-06, | |
| "loss": 0.0177, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 4.228896103896104, | |
| "grad_norm": 0.18711066246032715, | |
| "learning_rate": 4.67684941247768e-06, | |
| "loss": 0.0148, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 4.237012987012987, | |
| "grad_norm": 0.1875857561826706, | |
| "learning_rate": 4.5611657540464036e-06, | |
| "loss": 0.0153, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 4.24512987012987, | |
| "grad_norm": 0.18307802081108093, | |
| "learning_rate": 4.446862403965984e-06, | |
| "loss": 0.0147, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 4.253246753246753, | |
| "grad_norm": 0.29160308837890625, | |
| "learning_rate": 4.333942834461702e-06, | |
| "loss": 0.0135, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 4.261363636363637, | |
| "grad_norm": 0.26900240778923035, | |
| "learning_rate": 4.222410475723326e-06, | |
| "loss": 0.0128, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 4.26948051948052, | |
| "grad_norm": 0.22926048934459686, | |
| "learning_rate": 4.112268715800943e-06, | |
| "loss": 0.0141, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 4.277597402597403, | |
| "grad_norm": 0.21062804758548737, | |
| "learning_rate": 4.003520900502028e-06, | |
| "loss": 0.0177, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 4.285714285714286, | |
| "grad_norm": 0.16253219544887543, | |
| "learning_rate": 3.8961703332898e-06, | |
| "loss": 0.0144, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 4.2938311688311686, | |
| "grad_norm": 0.29484206438064575, | |
| "learning_rate": 3.790220275182854e-06, | |
| "loss": 0.0135, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 4.301948051948052, | |
| "grad_norm": 0.20472891628742218, | |
| "learning_rate": 3.685673944656176e-06, | |
| "loss": 0.0137, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 4.310064935064935, | |
| "grad_norm": 0.20979756116867065, | |
| "learning_rate": 3.582534517543268e-06, | |
| "loss": 0.015, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 4.318181818181818, | |
| "grad_norm": 0.19138938188552856, | |
| "learning_rate": 3.4808051269397512e-06, | |
| "loss": 0.0142, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 4.326298701298701, | |
| "grad_norm": 0.1692737489938736, | |
| "learning_rate": 3.380488863108183e-06, | |
| "loss": 0.014, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 4.334415584415584, | |
| "grad_norm": 0.22084228694438934, | |
| "learning_rate": 3.2815887733841365e-06, | |
| "loss": 0.0143, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 4.342532467532467, | |
| "grad_norm": 0.12135329097509384, | |
| "learning_rate": 3.1841078620836683e-06, | |
| "loss": 0.0126, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 4.35064935064935, | |
| "grad_norm": 0.2081880420446396, | |
| "learning_rate": 3.0880490904120874e-06, | |
| "loss": 0.0148, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 4.358766233766234, | |
| "grad_norm": 0.21467359364032745, | |
| "learning_rate": 2.9934153763739205e-06, | |
| "loss": 0.0175, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 4.366883116883117, | |
| "grad_norm": 0.14720773696899414, | |
| "learning_rate": 2.9002095946843277e-06, | |
| "loss": 0.014, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 4.375, | |
| "grad_norm": 0.18871958553791046, | |
| "learning_rate": 2.8084345766817676e-06, | |
| "loss": 0.0123, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 4.383116883116883, | |
| "grad_norm": 0.19331897795200348, | |
| "learning_rate": 2.718093110241976e-06, | |
| "loss": 0.0125, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 4.391233766233766, | |
| "grad_norm": 0.23791873455047607, | |
| "learning_rate": 2.6291879396933004e-06, | |
| "loss": 0.0154, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 4.39935064935065, | |
| "grad_norm": 0.16598720848560333, | |
| "learning_rate": 2.541721765733318e-06, | |
| "loss": 0.0117, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 4.407467532467533, | |
| "grad_norm": 0.14102214574813843, | |
| "learning_rate": 2.455697245346783e-06, | |
| "loss": 0.0137, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 4.415584415584416, | |
| "grad_norm": 0.12769535183906555, | |
| "learning_rate": 2.371116991724953e-06, | |
| "loss": 0.0157, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 4.423701298701299, | |
| "grad_norm": 0.12857921421527863, | |
| "learning_rate": 2.2879835741861586e-06, | |
| "loss": 0.0196, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 4.431818181818182, | |
| "grad_norm": 0.30980998277664185, | |
| "learning_rate": 2.206299518097804e-06, | |
| "loss": 0.0117, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 4.439935064935065, | |
| "grad_norm": 0.2178056240081787, | |
| "learning_rate": 2.1260673047996227e-06, | |
| "loss": 0.0173, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 4.448051948051948, | |
| "grad_norm": 0.15620489418506622, | |
| "learning_rate": 2.047289371528299e-06, | |
| "loss": 0.0104, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 4.4561688311688314, | |
| "grad_norm": 0.1251637190580368, | |
| "learning_rate": 1.96996811134344e-06, | |
| "loss": 0.0136, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 4.464285714285714, | |
| "grad_norm": 0.10176528990268707, | |
| "learning_rate": 1.8941058730549132e-06, | |
| "loss": 0.0115, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 4.472402597402597, | |
| "grad_norm": 0.19561536610126495, | |
| "learning_rate": 1.8197049611514194e-06, | |
| "loss": 0.0139, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 4.48051948051948, | |
| "grad_norm": 0.18119896948337555, | |
| "learning_rate": 1.7467676357305561e-06, | |
| "loss": 0.0123, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 4.488636363636363, | |
| "grad_norm": 0.2555669844150543, | |
| "learning_rate": 1.6752961124301415e-06, | |
| "loss": 0.0154, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 4.496753246753247, | |
| "grad_norm": 0.21792449057102203, | |
| "learning_rate": 1.6052925623609049e-06, | |
| "loss": 0.0189, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 4.50487012987013, | |
| "grad_norm": 0.1742662638425827, | |
| "learning_rate": 1.5367591120405256e-06, | |
| "loss": 0.0112, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 4.512987012987013, | |
| "grad_norm": 0.14939716458320618, | |
| "learning_rate": 1.4696978433290653e-06, | |
| "loss": 0.0137, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 4.521103896103896, | |
| "grad_norm": 0.14489960670471191, | |
| "learning_rate": 1.4041107933656928e-06, | |
| "loss": 0.0148, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 4.529220779220779, | |
| "grad_norm": 0.2126537412405014, | |
| "learning_rate": 1.339999954506821e-06, | |
| "loss": 0.0133, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 4.537337662337662, | |
| "grad_norm": 0.2052183747291565, | |
| "learning_rate": 1.2773672742655784e-06, | |
| "loss": 0.0143, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 4.545454545454545, | |
| "grad_norm": 0.16533197462558746, | |
| "learning_rate": 1.2162146552526399e-06, | |
| "loss": 0.0145, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 4.553571428571429, | |
| "grad_norm": 0.1644553244113922, | |
| "learning_rate": 1.1565439551184664e-06, | |
| "loss": 0.019, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 4.561688311688312, | |
| "grad_norm": 0.11637627333402634, | |
| "learning_rate": 1.0983569864968346e-06, | |
| "loss": 0.013, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 4.569805194805195, | |
| "grad_norm": 0.12196806818246841, | |
| "learning_rate": 1.0416555169497688e-06, | |
| "loss": 0.0124, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 4.577922077922078, | |
| "grad_norm": 0.19040703773498535, | |
| "learning_rate": 9.864412689139123e-07, | |
| "loss": 0.0145, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 4.586038961038961, | |
| "grad_norm": 0.1617046594619751, | |
| "learning_rate": 9.327159196481138e-07, | |
| "loss": 0.0142, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 4.5941558441558445, | |
| "grad_norm": 0.14342036843299866, | |
| "learning_rate": 8.804811011825398e-07, | |
| "loss": 0.0102, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 4.6022727272727275, | |
| "grad_norm": 0.17284558713436127, | |
| "learning_rate": 8.297384002690866e-07, | |
| "loss": 0.0147, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 4.6103896103896105, | |
| "grad_norm": 0.2236797958612442, | |
| "learning_rate": 7.804893583331696e-07, | |
| "loss": 0.0127, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 4.6185064935064934, | |
| "grad_norm": 0.17601999640464783, | |
| "learning_rate": 7.32735471426893e-07, | |
| "loss": 0.017, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 4.626623376623376, | |
| "grad_norm": 0.14382389187812805, | |
| "learning_rate": 6.864781901836259e-07, | |
| "loss": 0.0143, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 4.634740259740259, | |
| "grad_norm": 0.13551688194274902, | |
| "learning_rate": 6.417189197739093e-07, | |
| "loss": 0.0156, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 4.642857142857143, | |
| "grad_norm": 0.14876428246498108, | |
| "learning_rate": 5.984590198627849e-07, | |
| "loss": 0.0171, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 4.650974025974026, | |
| "grad_norm": 0.1803891956806183, | |
| "learning_rate": 5.566998045685112e-07, | |
| "loss": 0.0161, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 4.659090909090909, | |
| "grad_norm": 0.2185366451740265, | |
| "learning_rate": 5.164425424226016e-07, | |
| "loss": 0.0119, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 4.667207792207792, | |
| "grad_norm": 0.1409493088722229, | |
| "learning_rate": 4.776884563313266e-07, | |
| "loss": 0.0127, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 4.675324675324675, | |
| "grad_norm": 0.12052121758460999, | |
| "learning_rate": 4.404387235385443e-07, | |
| "loss": 0.0128, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 4.683441558441558, | |
| "grad_norm": 0.14814621210098267, | |
| "learning_rate": 4.0469447558995065e-07, | |
| "loss": 0.0157, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 4.691558441558442, | |
| "grad_norm": 0.2510521113872528, | |
| "learning_rate": 3.7045679829870175e-07, | |
| "loss": 0.0152, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 4.699675324675325, | |
| "grad_norm": 0.18528954684734344, | |
| "learning_rate": 3.377267317124233e-07, | |
| "loss": 0.0153, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 4.707792207792208, | |
| "grad_norm": 0.3942748010158539, | |
| "learning_rate": 3.0650527008162513e-07, | |
| "loss": 0.0141, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 4.715909090909091, | |
| "grad_norm": 0.10147012770175934, | |
| "learning_rate": 2.767933618295082e-07, | |
| "loss": 0.0122, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 4.724025974025974, | |
| "grad_norm": 0.19354303181171417, | |
| "learning_rate": 2.485919095231326e-07, | |
| "loss": 0.014, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 4.732142857142857, | |
| "grad_norm": 0.1840514838695526, | |
| "learning_rate": 2.219017698460002e-07, | |
| "loss": 0.0164, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 4.740259740259741, | |
| "grad_norm": 0.3346545994281769, | |
| "learning_rate": 1.9672375357206452e-07, | |
| "loss": 0.0134, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 4.748376623376624, | |
| "grad_norm": 0.24521546065807343, | |
| "learning_rate": 1.73058625541056e-07, | |
| "loss": 0.0135, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 4.7564935064935066, | |
| "grad_norm": 0.184413880109787, | |
| "learning_rate": 1.5090710463527836e-07, | |
| "loss": 0.0173, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 4.7646103896103895, | |
| "grad_norm": 0.15935760736465454, | |
| "learning_rate": 1.3026986375776485e-07, | |
| "loss": 0.014, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 4.7727272727272725, | |
| "grad_norm": 0.24353346228599548, | |
| "learning_rate": 1.1114752981183917e-07, | |
| "loss": 0.0159, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 4.7808441558441555, | |
| "grad_norm": 0.17594245076179504, | |
| "learning_rate": 9.354068368204739e-08, | |
| "loss": 0.0159, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 4.788961038961039, | |
| "grad_norm": 0.15030580759048462, | |
| "learning_rate": 7.744986021656076e-08, | |
| "loss": 0.0138, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 4.797077922077922, | |
| "grad_norm": 0.20552325248718262, | |
| "learning_rate": 6.287554821087783e-08, | |
| "loss": 0.0141, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 4.805194805194805, | |
| "grad_norm": 0.16335158050060272, | |
| "learning_rate": 4.981819039300284e-08, | |
| "loss": 0.0156, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 4.813311688311688, | |
| "grad_norm": 0.29560235142707825, | |
| "learning_rate": 3.827818341000655e-08, | |
| "loss": 0.0161, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 4.821428571428571, | |
| "grad_norm": 0.41275930404663086, | |
| "learning_rate": 2.8255877815946963e-08, | |
| "loss": 0.0145, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 4.829545454545455, | |
| "grad_norm": 0.21795588731765747, | |
| "learning_rate": 1.9751578061244504e-08, | |
| "loss": 0.0118, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 4.837662337662338, | |
| "grad_norm": 0.16346313059329987, | |
| "learning_rate": 1.2765542483417214e-08, | |
| "loss": 0.0106, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 4.845779220779221, | |
| "grad_norm": 0.2356872707605362, | |
| "learning_rate": 7.2979832992592365e-09, | |
| "loss": 0.0133, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 4.853896103896104, | |
| "grad_norm": 0.14218561351299286, | |
| "learning_rate": 3.349066598362649e-09, | |
| "loss": 0.013, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 4.862012987012987, | |
| "grad_norm": 0.173988476395607, | |
| "learning_rate": 9.189123380826114e-10, | |
| "loss": 0.0128, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 4.87012987012987, | |
| "grad_norm": 0.30807921290397644, | |
| "learning_rate": 7.594339912486703e-12, | |
| "loss": 0.0155, | |
| "step": 6000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 6000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |