diff --git "a/checkpoint-1500/trainer_state.json" "b/checkpoint-1500/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1500/trainer_state.json" @@ -0,0 +1,10533 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.6687472135532768, + "eval_steps": 500, + "global_step": 1500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00044583147570218456, + "grad_norm": 0.41694900393486023, + "learning_rate": 2.666666666666667e-07, + "loss": 1.9488, + "step": 1 + }, + { + "epoch": 0.0008916629514043691, + "grad_norm": 0.4325166940689087, + "learning_rate": 5.333333333333335e-07, + "loss": 1.9474, + "step": 2 + }, + { + "epoch": 0.0013374944271065537, + "grad_norm": 0.5329936146736145, + "learning_rate": 8.000000000000001e-07, + "loss": 1.9654, + "step": 3 + }, + { + "epoch": 0.0017833259028087382, + "grad_norm": 0.45425093173980713, + "learning_rate": 1.066666666666667e-06, + "loss": 2.0171, + "step": 4 + }, + { + "epoch": 0.002229157378510923, + "grad_norm": 0.42153477668762207, + "learning_rate": 1.3333333333333334e-06, + "loss": 1.9727, + "step": 5 + }, + { + "epoch": 0.0026749888542131075, + "grad_norm": 0.42438578605651855, + "learning_rate": 1.6000000000000001e-06, + "loss": 1.9308, + "step": 6 + }, + { + "epoch": 0.003120820329915292, + "grad_norm": 0.4049244225025177, + "learning_rate": 1.8666666666666669e-06, + "loss": 1.9581, + "step": 7 + }, + { + "epoch": 0.0035666518056174765, + "grad_norm": 0.40796998143196106, + "learning_rate": 2.133333333333334e-06, + "loss": 1.9542, + "step": 8 + }, + { + "epoch": 0.004012483281319661, + "grad_norm": 0.3876088261604309, + "learning_rate": 2.4000000000000003e-06, + "loss": 1.9169, + "step": 9 + }, + { + "epoch": 0.004458314757021846, + "grad_norm": 0.41765469312667847, + "learning_rate": 2.666666666666667e-06, + "loss": 1.9551, + "step": 10 + }, + { + "epoch": 0.00490414623272403, + "grad_norm": 0.37440115213394165, + "learning_rate": 2.9333333333333338e-06, + "loss": 1.9341, + "step": 11 + }, + { + "epoch": 0.005349977708426215, + "grad_norm": 0.3740348815917969, + "learning_rate": 3.2000000000000003e-06, + "loss": 1.9513, + "step": 12 + }, + { + "epoch": 0.0057958091841284, + "grad_norm": 0.34098368883132935, + "learning_rate": 3.4666666666666672e-06, + "loss": 1.9697, + "step": 13 + }, + { + "epoch": 0.006241640659830584, + "grad_norm": 0.3392350375652313, + "learning_rate": 3.7333333333333337e-06, + "loss": 1.8661, + "step": 14 + }, + { + "epoch": 0.006687472135532769, + "grad_norm": 0.30836546421051025, + "learning_rate": 4.000000000000001e-06, + "loss": 1.9398, + "step": 15 + }, + { + "epoch": 0.007133303611234953, + "grad_norm": 0.3208477199077606, + "learning_rate": 4.266666666666668e-06, + "loss": 1.9136, + "step": 16 + }, + { + "epoch": 0.007579135086937138, + "grad_norm": 0.31885215640068054, + "learning_rate": 4.533333333333334e-06, + "loss": 1.9242, + "step": 17 + }, + { + "epoch": 0.008024966562639322, + "grad_norm": 0.27593743801116943, + "learning_rate": 4.800000000000001e-06, + "loss": 1.8541, + "step": 18 + }, + { + "epoch": 0.008470798038341507, + "grad_norm": 0.2847069203853607, + "learning_rate": 5.0666666666666676e-06, + "loss": 1.9634, + "step": 19 + }, + { + "epoch": 0.008916629514043692, + "grad_norm": 0.274640828371048, + "learning_rate": 5.333333333333334e-06, + "loss": 1.8762, + "step": 20 + }, + { + "epoch": 0.009362460989745877, + "grad_norm": 0.2889622449874878, + "learning_rate": 
5.600000000000001e-06, + "loss": 1.9744, + "step": 21 + }, + { + "epoch": 0.00980829246544806, + "grad_norm": 0.28000110387802124, + "learning_rate": 5.8666666666666675e-06, + "loss": 1.9691, + "step": 22 + }, + { + "epoch": 0.010254123941150245, + "grad_norm": 0.2684955894947052, + "learning_rate": 6.133333333333334e-06, + "loss": 1.9243, + "step": 23 + }, + { + "epoch": 0.01069995541685243, + "grad_norm": 0.2879243791103363, + "learning_rate": 6.4000000000000006e-06, + "loss": 2.062, + "step": 24 + }, + { + "epoch": 0.011145786892554615, + "grad_norm": 0.2579769492149353, + "learning_rate": 6.666666666666667e-06, + "loss": 1.9667, + "step": 25 + }, + { + "epoch": 0.0115916183682568, + "grad_norm": 0.2598419785499573, + "learning_rate": 6.9333333333333344e-06, + "loss": 1.9397, + "step": 26 + }, + { + "epoch": 0.012037449843958983, + "grad_norm": 0.2543875575065613, + "learning_rate": 7.2000000000000005e-06, + "loss": 1.8795, + "step": 27 + }, + { + "epoch": 0.012483281319661168, + "grad_norm": 0.2683740258216858, + "learning_rate": 7.4666666666666675e-06, + "loss": 2.039, + "step": 28 + }, + { + "epoch": 0.012929112795363353, + "grad_norm": 0.22718575596809387, + "learning_rate": 7.733333333333334e-06, + "loss": 1.8735, + "step": 29 + }, + { + "epoch": 0.013374944271065538, + "grad_norm": 0.2308313548564911, + "learning_rate": 8.000000000000001e-06, + "loss": 1.9403, + "step": 30 + }, + { + "epoch": 0.013820775746767721, + "grad_norm": 0.22583268582820892, + "learning_rate": 8.266666666666667e-06, + "loss": 1.9798, + "step": 31 + }, + { + "epoch": 0.014266607222469906, + "grad_norm": 0.2153775542974472, + "learning_rate": 8.533333333333335e-06, + "loss": 1.8289, + "step": 32 + }, + { + "epoch": 0.014712438698172091, + "grad_norm": 0.21741226315498352, + "learning_rate": 8.8e-06, + "loss": 1.8725, + "step": 33 + }, + { + "epoch": 0.015158270173874276, + "grad_norm": 0.24217155575752258, + "learning_rate": 9.066666666666667e-06, + "loss": 2.0008, + "step": 34 + }, + { + "epoch": 0.01560410164957646, + "grad_norm": 0.21178147196769714, + "learning_rate": 9.333333333333334e-06, + "loss": 1.8646, + "step": 35 + }, + { + "epoch": 0.016049933125278644, + "grad_norm": 0.2568570077419281, + "learning_rate": 9.600000000000001e-06, + "loss": 1.8402, + "step": 36 + }, + { + "epoch": 0.01649576460098083, + "grad_norm": 0.21020956337451935, + "learning_rate": 9.866666666666668e-06, + "loss": 1.8991, + "step": 37 + }, + { + "epoch": 0.016941596076683014, + "grad_norm": 0.2205541729927063, + "learning_rate": 1.0133333333333335e-05, + "loss": 1.8296, + "step": 38 + }, + { + "epoch": 0.017387427552385197, + "grad_norm": 0.23295548558235168, + "learning_rate": 1.04e-05, + "loss": 1.8858, + "step": 39 + }, + { + "epoch": 0.017833259028087384, + "grad_norm": 0.20862042903900146, + "learning_rate": 1.0666666666666667e-05, + "loss": 1.8589, + "step": 40 + }, + { + "epoch": 0.018279090503789567, + "grad_norm": 0.20791663229465485, + "learning_rate": 1.0933333333333334e-05, + "loss": 1.9105, + "step": 41 + }, + { + "epoch": 0.018724921979491754, + "grad_norm": 0.23138144612312317, + "learning_rate": 1.1200000000000001e-05, + "loss": 1.9205, + "step": 42 + }, + { + "epoch": 0.019170753455193937, + "grad_norm": 0.2184125930070877, + "learning_rate": 1.1466666666666668e-05, + "loss": 1.9443, + "step": 43 + }, + { + "epoch": 0.01961658493089612, + "grad_norm": 0.19976778328418732, + "learning_rate": 1.1733333333333335e-05, + "loss": 1.7878, + "step": 44 + }, + { + "epoch": 0.020062416406598307, + "grad_norm": 
0.19608213007450104, + "learning_rate": 1.2e-05, + "loss": 1.8668, + "step": 45 + }, + { + "epoch": 0.02050824788230049, + "grad_norm": 0.2110237330198288, + "learning_rate": 1.2266666666666667e-05, + "loss": 1.9117, + "step": 46 + }, + { + "epoch": 0.020954079358002677, + "grad_norm": 0.21283794939517975, + "learning_rate": 1.2533333333333336e-05, + "loss": 1.9009, + "step": 47 + }, + { + "epoch": 0.02139991083370486, + "grad_norm": 0.21566396951675415, + "learning_rate": 1.2800000000000001e-05, + "loss": 1.8339, + "step": 48 + }, + { + "epoch": 0.021845742309407043, + "grad_norm": 0.21194687485694885, + "learning_rate": 1.3066666666666668e-05, + "loss": 1.9119, + "step": 49 + }, + { + "epoch": 0.02229157378510923, + "grad_norm": 0.21619786322116852, + "learning_rate": 1.3333333333333333e-05, + "loss": 1.9621, + "step": 50 + }, + { + "epoch": 0.022737405260811413, + "grad_norm": 0.21244817972183228, + "learning_rate": 1.3600000000000002e-05, + "loss": 1.8185, + "step": 51 + }, + { + "epoch": 0.0231832367365136, + "grad_norm": 0.20910470187664032, + "learning_rate": 1.3866666666666669e-05, + "loss": 1.8852, + "step": 52 + }, + { + "epoch": 0.023629068212215783, + "grad_norm": 0.2056826651096344, + "learning_rate": 1.4133333333333334e-05, + "loss": 1.7534, + "step": 53 + }, + { + "epoch": 0.024074899687917966, + "grad_norm": 0.20464245975017548, + "learning_rate": 1.4400000000000001e-05, + "loss": 1.8634, + "step": 54 + }, + { + "epoch": 0.024520731163620153, + "grad_norm": 0.21582286059856415, + "learning_rate": 1.4666666666666666e-05, + "loss": 1.8456, + "step": 55 + }, + { + "epoch": 0.024966562639322336, + "grad_norm": 0.21299712359905243, + "learning_rate": 1.4933333333333335e-05, + "loss": 1.8615, + "step": 56 + }, + { + "epoch": 0.02541239411502452, + "grad_norm": 0.22036010026931763, + "learning_rate": 1.5200000000000002e-05, + "loss": 1.9713, + "step": 57 + }, + { + "epoch": 0.025858225590726706, + "grad_norm": 0.2094155102968216, + "learning_rate": 1.546666666666667e-05, + "loss": 1.8669, + "step": 58 + }, + { + "epoch": 0.02630405706642889, + "grad_norm": 0.219874769449234, + "learning_rate": 1.5733333333333334e-05, + "loss": 1.9496, + "step": 59 + }, + { + "epoch": 0.026749888542131076, + "grad_norm": 0.2156878113746643, + "learning_rate": 1.6000000000000003e-05, + "loss": 1.8569, + "step": 60 + }, + { + "epoch": 0.02719572001783326, + "grad_norm": 0.2157766968011856, + "learning_rate": 1.6266666666666668e-05, + "loss": 1.836, + "step": 61 + }, + { + "epoch": 0.027641551493535442, + "grad_norm": 0.2111901044845581, + "learning_rate": 1.6533333333333333e-05, + "loss": 1.8721, + "step": 62 + }, + { + "epoch": 0.02808738296923763, + "grad_norm": 0.2262468785047531, + "learning_rate": 1.6800000000000002e-05, + "loss": 1.9684, + "step": 63 + }, + { + "epoch": 0.028533214444939812, + "grad_norm": 0.21171987056732178, + "learning_rate": 1.706666666666667e-05, + "loss": 1.7913, + "step": 64 + }, + { + "epoch": 0.028979045920642, + "grad_norm": 0.2222784012556076, + "learning_rate": 1.7333333333333336e-05, + "loss": 1.8713, + "step": 65 + }, + { + "epoch": 0.029424877396344182, + "grad_norm": 0.22467799484729767, + "learning_rate": 1.76e-05, + "loss": 1.8577, + "step": 66 + }, + { + "epoch": 0.029870708872046365, + "grad_norm": 0.22000645101070404, + "learning_rate": 1.7866666666666666e-05, + "loss": 1.7445, + "step": 67 + }, + { + "epoch": 0.03031654034774855, + "grad_norm": 0.21429285407066345, + "learning_rate": 1.8133333333333335e-05, + "loss": 1.7858, + "step": 68 + }, + { + 
"epoch": 0.030762371823450735, + "grad_norm": 0.2181319147348404, + "learning_rate": 1.8400000000000003e-05, + "loss": 1.8239, + "step": 69 + }, + { + "epoch": 0.03120820329915292, + "grad_norm": 0.23378431797027588, + "learning_rate": 1.866666666666667e-05, + "loss": 1.8719, + "step": 70 + }, + { + "epoch": 0.03165403477485511, + "grad_norm": 0.23543697595596313, + "learning_rate": 1.8933333333333334e-05, + "loss": 1.9101, + "step": 71 + }, + { + "epoch": 0.03209986625055729, + "grad_norm": 0.22190536558628082, + "learning_rate": 1.9200000000000003e-05, + "loss": 1.8254, + "step": 72 + }, + { + "epoch": 0.032545697726259475, + "grad_norm": 0.2234829217195511, + "learning_rate": 1.9466666666666668e-05, + "loss": 1.7942, + "step": 73 + }, + { + "epoch": 0.03299152920196166, + "grad_norm": 0.2278570681810379, + "learning_rate": 1.9733333333333336e-05, + "loss": 1.8664, + "step": 74 + }, + { + "epoch": 0.03343736067766384, + "grad_norm": 0.236073300242424, + "learning_rate": 2e-05, + "loss": 1.8614, + "step": 75 + }, + { + "epoch": 0.03388319215336603, + "grad_norm": 0.22639039158821106, + "learning_rate": 2.026666666666667e-05, + "loss": 1.8326, + "step": 76 + }, + { + "epoch": 0.034329023629068214, + "grad_norm": 0.23513582348823547, + "learning_rate": 2.0533333333333336e-05, + "loss": 1.8779, + "step": 77 + }, + { + "epoch": 0.034774855104770394, + "grad_norm": 0.24936141073703766, + "learning_rate": 2.08e-05, + "loss": 1.8061, + "step": 78 + }, + { + "epoch": 0.03522068658047258, + "grad_norm": 0.2312684804201126, + "learning_rate": 2.1066666666666666e-05, + "loss": 1.8214, + "step": 79 + }, + { + "epoch": 0.03566651805617477, + "grad_norm": 0.24221794307231903, + "learning_rate": 2.1333333333333335e-05, + "loss": 1.8691, + "step": 80 + }, + { + "epoch": 0.03611234953187695, + "grad_norm": 0.24977678060531616, + "learning_rate": 2.1600000000000003e-05, + "loss": 1.87, + "step": 81 + }, + { + "epoch": 0.036558181007579134, + "grad_norm": 0.24968618154525757, + "learning_rate": 2.186666666666667e-05, + "loss": 1.7725, + "step": 82 + }, + { + "epoch": 0.03700401248328132, + "grad_norm": 0.24534040689468384, + "learning_rate": 2.2133333333333337e-05, + "loss": 1.7268, + "step": 83 + }, + { + "epoch": 0.03744984395898351, + "grad_norm": 0.2817310392856598, + "learning_rate": 2.2400000000000002e-05, + "loss": 1.8211, + "step": 84 + }, + { + "epoch": 0.03789567543468569, + "grad_norm": 0.24851234257221222, + "learning_rate": 2.2666666666666668e-05, + "loss": 1.801, + "step": 85 + }, + { + "epoch": 0.038341506910387874, + "grad_norm": 0.2601252496242523, + "learning_rate": 2.2933333333333336e-05, + "loss": 1.8748, + "step": 86 + }, + { + "epoch": 0.03878733838609006, + "grad_norm": 0.25771164894104004, + "learning_rate": 2.32e-05, + "loss": 1.8054, + "step": 87 + }, + { + "epoch": 0.03923316986179224, + "grad_norm": 0.25612396001815796, + "learning_rate": 2.346666666666667e-05, + "loss": 1.8031, + "step": 88 + }, + { + "epoch": 0.03967900133749443, + "grad_norm": 0.30037111043930054, + "learning_rate": 2.373333333333334e-05, + "loss": 1.7969, + "step": 89 + }, + { + "epoch": 0.04012483281319661, + "grad_norm": 0.2585238218307495, + "learning_rate": 2.4e-05, + "loss": 1.8384, + "step": 90 + }, + { + "epoch": 0.04057066428889879, + "grad_norm": 0.26387372612953186, + "learning_rate": 2.426666666666667e-05, + "loss": 1.9031, + "step": 91 + }, + { + "epoch": 0.04101649576460098, + "grad_norm": 0.2529267370700836, + "learning_rate": 2.4533333333333334e-05, + "loss": 1.8204, + "step": 92 + }, + { + 
"epoch": 0.041462327240303166, + "grad_norm": 0.25664445757865906, + "learning_rate": 2.4800000000000003e-05, + "loss": 1.8447, + "step": 93 + }, + { + "epoch": 0.04190815871600535, + "grad_norm": 0.2572501301765442, + "learning_rate": 2.5066666666666672e-05, + "loss": 1.8514, + "step": 94 + }, + { + "epoch": 0.04235399019170753, + "grad_norm": 0.26171356439590454, + "learning_rate": 2.5333333333333334e-05, + "loss": 1.7994, + "step": 95 + }, + { + "epoch": 0.04279982166740972, + "grad_norm": 0.2504459619522095, + "learning_rate": 2.5600000000000002e-05, + "loss": 1.8271, + "step": 96 + }, + { + "epoch": 0.043245653143111906, + "grad_norm": 0.25057297945022583, + "learning_rate": 2.5866666666666667e-05, + "loss": 1.7822, + "step": 97 + }, + { + "epoch": 0.043691484618814086, + "grad_norm": 0.271733820438385, + "learning_rate": 2.6133333333333336e-05, + "loss": 1.8345, + "step": 98 + }, + { + "epoch": 0.04413731609451627, + "grad_norm": 0.2711006700992584, + "learning_rate": 2.6400000000000005e-05, + "loss": 1.8945, + "step": 99 + }, + { + "epoch": 0.04458314757021846, + "grad_norm": 0.29651570320129395, + "learning_rate": 2.6666666666666667e-05, + "loss": 1.923, + "step": 100 + }, + { + "epoch": 0.04502897904592064, + "grad_norm": 0.28082215785980225, + "learning_rate": 2.6933333333333335e-05, + "loss": 1.8876, + "step": 101 + }, + { + "epoch": 0.045474810521622826, + "grad_norm": 0.26620352268218994, + "learning_rate": 2.7200000000000004e-05, + "loss": 1.8855, + "step": 102 + }, + { + "epoch": 0.04592064199732501, + "grad_norm": 0.2536965310573578, + "learning_rate": 2.746666666666667e-05, + "loss": 1.7922, + "step": 103 + }, + { + "epoch": 0.0463664734730272, + "grad_norm": 0.26968443393707275, + "learning_rate": 2.7733333333333338e-05, + "loss": 1.7784, + "step": 104 + }, + { + "epoch": 0.04681230494872938, + "grad_norm": 0.2990209460258484, + "learning_rate": 2.8e-05, + "loss": 1.8285, + "step": 105 + }, + { + "epoch": 0.047258136424431565, + "grad_norm": 0.294802188873291, + "learning_rate": 2.8266666666666668e-05, + "loss": 1.8964, + "step": 106 + }, + { + "epoch": 0.04770396790013375, + "grad_norm": 0.3011243939399719, + "learning_rate": 2.8533333333333337e-05, + "loss": 1.8652, + "step": 107 + }, + { + "epoch": 0.04814979937583593, + "grad_norm": 0.292427659034729, + "learning_rate": 2.8800000000000002e-05, + "loss": 1.7647, + "step": 108 + }, + { + "epoch": 0.04859563085153812, + "grad_norm": 0.2936179041862488, + "learning_rate": 2.906666666666667e-05, + "loss": 1.8371, + "step": 109 + }, + { + "epoch": 0.049041462327240305, + "grad_norm": 0.28111955523490906, + "learning_rate": 2.9333333333333333e-05, + "loss": 1.8064, + "step": 110 + }, + { + "epoch": 0.049487293802942485, + "grad_norm": 0.3002088963985443, + "learning_rate": 2.96e-05, + "loss": 1.8461, + "step": 111 + }, + { + "epoch": 0.04993312527864467, + "grad_norm": 0.2941628694534302, + "learning_rate": 2.986666666666667e-05, + "loss": 1.8619, + "step": 112 + }, + { + "epoch": 0.05037895675434686, + "grad_norm": 0.27041497826576233, + "learning_rate": 3.0133333333333335e-05, + "loss": 1.761, + "step": 113 + }, + { + "epoch": 0.05082478823004904, + "grad_norm": 0.3027989864349365, + "learning_rate": 3.0400000000000004e-05, + "loss": 1.8031, + "step": 114 + }, + { + "epoch": 0.051270619705751225, + "grad_norm": 0.2786939740180969, + "learning_rate": 3.066666666666667e-05, + "loss": 1.8061, + "step": 115 + }, + { + "epoch": 0.05171645118145341, + "grad_norm": 0.2974098324775696, + "learning_rate": 3.093333333333334e-05, + 
"loss": 1.9473, + "step": 116 + }, + { + "epoch": 0.0521622826571556, + "grad_norm": 0.32027050852775574, + "learning_rate": 3.1200000000000006e-05, + "loss": 1.7295, + "step": 117 + }, + { + "epoch": 0.05260811413285778, + "grad_norm": 0.27729111909866333, + "learning_rate": 3.146666666666667e-05, + "loss": 1.6948, + "step": 118 + }, + { + "epoch": 0.053053945608559964, + "grad_norm": 0.34655502438545227, + "learning_rate": 3.173333333333334e-05, + "loss": 1.8459, + "step": 119 + }, + { + "epoch": 0.05349977708426215, + "grad_norm": 0.29461273550987244, + "learning_rate": 3.2000000000000005e-05, + "loss": 1.8, + "step": 120 + }, + { + "epoch": 0.05394560855996433, + "grad_norm": 0.3321416676044464, + "learning_rate": 3.226666666666667e-05, + "loss": 1.8233, + "step": 121 + }, + { + "epoch": 0.05439144003566652, + "grad_norm": 0.29211312532424927, + "learning_rate": 3.2533333333333336e-05, + "loss": 1.8488, + "step": 122 + }, + { + "epoch": 0.054837271511368704, + "grad_norm": 0.3023246228694916, + "learning_rate": 3.28e-05, + "loss": 1.8617, + "step": 123 + }, + { + "epoch": 0.055283102987070884, + "grad_norm": 0.3080272078514099, + "learning_rate": 3.3066666666666666e-05, + "loss": 1.8749, + "step": 124 + }, + { + "epoch": 0.05572893446277307, + "grad_norm": 0.29739588499069214, + "learning_rate": 3.3333333333333335e-05, + "loss": 1.9163, + "step": 125 + }, + { + "epoch": 0.05617476593847526, + "grad_norm": 0.3099679946899414, + "learning_rate": 3.3600000000000004e-05, + "loss": 1.8256, + "step": 126 + }, + { + "epoch": 0.056620597414177444, + "grad_norm": 0.3503029942512512, + "learning_rate": 3.386666666666667e-05, + "loss": 1.9656, + "step": 127 + }, + { + "epoch": 0.057066428889879624, + "grad_norm": 0.35092490911483765, + "learning_rate": 3.413333333333334e-05, + "loss": 1.9083, + "step": 128 + }, + { + "epoch": 0.05751226036558181, + "grad_norm": 0.28823041915893555, + "learning_rate": 3.44e-05, + "loss": 1.837, + "step": 129 + }, + { + "epoch": 0.057958091841284, + "grad_norm": 0.3080367147922516, + "learning_rate": 3.466666666666667e-05, + "loss": 1.8351, + "step": 130 + }, + { + "epoch": 0.05840392331698618, + "grad_norm": 0.27589717507362366, + "learning_rate": 3.493333333333333e-05, + "loss": 1.8196, + "step": 131 + }, + { + "epoch": 0.058849754792688364, + "grad_norm": 0.310061514377594, + "learning_rate": 3.52e-05, + "loss": 1.8258, + "step": 132 + }, + { + "epoch": 0.05929558626839055, + "grad_norm": 0.30034536123275757, + "learning_rate": 3.546666666666667e-05, + "loss": 1.7869, + "step": 133 + }, + { + "epoch": 0.05974141774409273, + "grad_norm": 0.3570665419101715, + "learning_rate": 3.573333333333333e-05, + "loss": 1.8544, + "step": 134 + }, + { + "epoch": 0.06018724921979492, + "grad_norm": 0.3011762201786041, + "learning_rate": 3.6e-05, + "loss": 1.8537, + "step": 135 + }, + { + "epoch": 0.0606330806954971, + "grad_norm": 0.3773946762084961, + "learning_rate": 3.626666666666667e-05, + "loss": 1.8586, + "step": 136 + }, + { + "epoch": 0.06107891217119929, + "grad_norm": 0.27055782079696655, + "learning_rate": 3.653333333333334e-05, + "loss": 1.8264, + "step": 137 + }, + { + "epoch": 0.06152474364690147, + "grad_norm": 0.3715250492095947, + "learning_rate": 3.680000000000001e-05, + "loss": 1.8942, + "step": 138 + }, + { + "epoch": 0.061970575122603656, + "grad_norm": 0.29873985052108765, + "learning_rate": 3.706666666666667e-05, + "loss": 1.8412, + "step": 139 + }, + { + "epoch": 0.06241640659830584, + "grad_norm": 0.31906992197036743, + "learning_rate": 
3.733333333333334e-05, + "loss": 1.7687, + "step": 140 + }, + { + "epoch": 0.06286223807400802, + "grad_norm": 0.2958746552467346, + "learning_rate": 3.76e-05, + "loss": 1.8127, + "step": 141 + }, + { + "epoch": 0.06330806954971022, + "grad_norm": 0.32697004079818726, + "learning_rate": 3.786666666666667e-05, + "loss": 1.8661, + "step": 142 + }, + { + "epoch": 0.0637539010254124, + "grad_norm": 0.2766285240650177, + "learning_rate": 3.8133333333333336e-05, + "loss": 1.7151, + "step": 143 + }, + { + "epoch": 0.06419973250111458, + "grad_norm": 0.31309226155281067, + "learning_rate": 3.8400000000000005e-05, + "loss": 1.8474, + "step": 144 + }, + { + "epoch": 0.06464556397681677, + "grad_norm": 0.2824843227863312, + "learning_rate": 3.866666666666667e-05, + "loss": 1.8155, + "step": 145 + }, + { + "epoch": 0.06509139545251895, + "grad_norm": 0.2713295817375183, + "learning_rate": 3.8933333333333336e-05, + "loss": 1.8877, + "step": 146 + }, + { + "epoch": 0.06553722692822113, + "grad_norm": 0.35191434621810913, + "learning_rate": 3.9200000000000004e-05, + "loss": 1.7751, + "step": 147 + }, + { + "epoch": 0.06598305840392332, + "grad_norm": 0.30430588126182556, + "learning_rate": 3.946666666666667e-05, + "loss": 1.8379, + "step": 148 + }, + { + "epoch": 0.0664288898796255, + "grad_norm": 0.34200629591941833, + "learning_rate": 3.9733333333333335e-05, + "loss": 1.7795, + "step": 149 + }, + { + "epoch": 0.06687472135532768, + "grad_norm": 0.28752633929252625, + "learning_rate": 4e-05, + "loss": 1.8612, + "step": 150 + }, + { + "epoch": 0.06732055283102988, + "grad_norm": 0.3259822726249695, + "learning_rate": 4.0266666666666665e-05, + "loss": 1.8534, + "step": 151 + }, + { + "epoch": 0.06776638430673206, + "grad_norm": 0.2901393175125122, + "learning_rate": 4.053333333333334e-05, + "loss": 1.8215, + "step": 152 + }, + { + "epoch": 0.06821221578243424, + "grad_norm": 0.30397653579711914, + "learning_rate": 4.08e-05, + "loss": 1.7901, + "step": 153 + }, + { + "epoch": 0.06865804725813643, + "grad_norm": 0.31820032000541687, + "learning_rate": 4.106666666666667e-05, + "loss": 1.7694, + "step": 154 + }, + { + "epoch": 0.06910387873383861, + "grad_norm": 0.31631478667259216, + "learning_rate": 4.133333333333334e-05, + "loss": 1.7254, + "step": 155 + }, + { + "epoch": 0.06954971020954079, + "grad_norm": 0.32512184977531433, + "learning_rate": 4.16e-05, + "loss": 1.7531, + "step": 156 + }, + { + "epoch": 0.06999554168524298, + "grad_norm": 0.2968920171260834, + "learning_rate": 4.186666666666667e-05, + "loss": 1.8147, + "step": 157 + }, + { + "epoch": 0.07044137316094516, + "grad_norm": 0.335467129945755, + "learning_rate": 4.213333333333333e-05, + "loss": 1.7788, + "step": 158 + }, + { + "epoch": 0.07088720463664734, + "grad_norm": 0.33457300066947937, + "learning_rate": 4.240000000000001e-05, + "loss": 1.874, + "step": 159 + }, + { + "epoch": 0.07133303611234953, + "grad_norm": 0.3013593554496765, + "learning_rate": 4.266666666666667e-05, + "loss": 1.8856, + "step": 160 + }, + { + "epoch": 0.07177886758805171, + "grad_norm": 0.273561030626297, + "learning_rate": 4.293333333333333e-05, + "loss": 1.7459, + "step": 161 + }, + { + "epoch": 0.0722246990637539, + "grad_norm": 0.283621221780777, + "learning_rate": 4.3200000000000007e-05, + "loss": 1.8431, + "step": 162 + }, + { + "epoch": 0.07267053053945609, + "grad_norm": 0.27030712366104126, + "learning_rate": 4.346666666666667e-05, + "loss": 1.7615, + "step": 163 + }, + { + "epoch": 0.07311636201515827, + "grad_norm": 0.27857705950737, + "learning_rate": 
4.373333333333334e-05, + "loss": 1.8442, + "step": 164 + }, + { + "epoch": 0.07356219349086046, + "grad_norm": 0.2681543827056885, + "learning_rate": 4.4000000000000006e-05, + "loss": 1.7566, + "step": 165 + }, + { + "epoch": 0.07400802496656264, + "grad_norm": 0.28761669993400574, + "learning_rate": 4.4266666666666674e-05, + "loss": 1.7749, + "step": 166 + }, + { + "epoch": 0.07445385644226482, + "grad_norm": 0.318764328956604, + "learning_rate": 4.4533333333333336e-05, + "loss": 1.8249, + "step": 167 + }, + { + "epoch": 0.07489968791796701, + "grad_norm": 0.2913915514945984, + "learning_rate": 4.4800000000000005e-05, + "loss": 1.8324, + "step": 168 + }, + { + "epoch": 0.0753455193936692, + "grad_norm": 0.2661246061325073, + "learning_rate": 4.506666666666667e-05, + "loss": 1.8126, + "step": 169 + }, + { + "epoch": 0.07579135086937137, + "grad_norm": 0.29662206768989563, + "learning_rate": 4.5333333333333335e-05, + "loss": 1.84, + "step": 170 + }, + { + "epoch": 0.07623718234507357, + "grad_norm": 0.3414687216281891, + "learning_rate": 4.56e-05, + "loss": 1.8259, + "step": 171 + }, + { + "epoch": 0.07668301382077575, + "grad_norm": 0.29384845495224, + "learning_rate": 4.586666666666667e-05, + "loss": 1.8341, + "step": 172 + }, + { + "epoch": 0.07712884529647793, + "grad_norm": 0.313362717628479, + "learning_rate": 4.6133333333333334e-05, + "loss": 1.8403, + "step": 173 + }, + { + "epoch": 0.07757467677218012, + "grad_norm": 0.27511510252952576, + "learning_rate": 4.64e-05, + "loss": 1.8502, + "step": 174 + }, + { + "epoch": 0.0780205082478823, + "grad_norm": 0.28761816024780273, + "learning_rate": 4.666666666666667e-05, + "loss": 1.8157, + "step": 175 + }, + { + "epoch": 0.07846633972358448, + "grad_norm": 0.30127012729644775, + "learning_rate": 4.693333333333334e-05, + "loss": 1.8559, + "step": 176 + }, + { + "epoch": 0.07891217119928667, + "grad_norm": 0.26424428820610046, + "learning_rate": 4.72e-05, + "loss": 1.81, + "step": 177 + }, + { + "epoch": 0.07935800267498885, + "grad_norm": 0.34712114930152893, + "learning_rate": 4.746666666666668e-05, + "loss": 1.8874, + "step": 178 + }, + { + "epoch": 0.07980383415069103, + "grad_norm": 0.34591299295425415, + "learning_rate": 4.773333333333334e-05, + "loss": 1.7735, + "step": 179 + }, + { + "epoch": 0.08024966562639323, + "grad_norm": 0.3041944205760956, + "learning_rate": 4.8e-05, + "loss": 1.8873, + "step": 180 + }, + { + "epoch": 0.0806954971020954, + "grad_norm": 0.3604406714439392, + "learning_rate": 4.826666666666668e-05, + "loss": 1.7453, + "step": 181 + }, + { + "epoch": 0.08114132857779759, + "grad_norm": 0.2718447744846344, + "learning_rate": 4.853333333333334e-05, + "loss": 1.7958, + "step": 182 + }, + { + "epoch": 0.08158716005349978, + "grad_norm": 0.42196327447891235, + "learning_rate": 4.88e-05, + "loss": 1.8315, + "step": 183 + }, + { + "epoch": 0.08203299152920196, + "grad_norm": 0.24650803208351135, + "learning_rate": 4.906666666666667e-05, + "loss": 1.7289, + "step": 184 + }, + { + "epoch": 0.08247882300490414, + "grad_norm": 0.35063791275024414, + "learning_rate": 4.933333333333334e-05, + "loss": 1.7582, + "step": 185 + }, + { + "epoch": 0.08292465448060633, + "grad_norm": 0.2629041373729706, + "learning_rate": 4.9600000000000006e-05, + "loss": 1.9033, + "step": 186 + }, + { + "epoch": 0.08337048595630851, + "grad_norm": 0.292608380317688, + "learning_rate": 4.986666666666667e-05, + "loss": 1.8062, + "step": 187 + }, + { + "epoch": 0.0838163174320107, + "grad_norm": 0.2840345501899719, + "learning_rate": 
5.0133333333333343e-05, + "loss": 1.818, + "step": 188 + }, + { + "epoch": 0.08426214890771289, + "grad_norm": 0.3064083755016327, + "learning_rate": 5.0400000000000005e-05, + "loss": 1.7439, + "step": 189 + }, + { + "epoch": 0.08470798038341507, + "grad_norm": 0.2645147740840912, + "learning_rate": 5.066666666666667e-05, + "loss": 1.9059, + "step": 190 + }, + { + "epoch": 0.08515381185911726, + "grad_norm": 0.2822587192058563, + "learning_rate": 5.093333333333334e-05, + "loss": 1.8309, + "step": 191 + }, + { + "epoch": 0.08559964333481944, + "grad_norm": 0.2765088379383087, + "learning_rate": 5.1200000000000004e-05, + "loss": 1.8497, + "step": 192 + }, + { + "epoch": 0.08604547481052162, + "grad_norm": 0.27604249119758606, + "learning_rate": 5.1466666666666666e-05, + "loss": 1.6965, + "step": 193 + }, + { + "epoch": 0.08649130628622381, + "grad_norm": 0.24740959703922272, + "learning_rate": 5.1733333333333335e-05, + "loss": 1.7282, + "step": 194 + }, + { + "epoch": 0.08693713776192599, + "grad_norm": 0.28995606303215027, + "learning_rate": 5.2000000000000004e-05, + "loss": 1.8073, + "step": 195 + }, + { + "epoch": 0.08738296923762817, + "grad_norm": 0.2612221837043762, + "learning_rate": 5.226666666666667e-05, + "loss": 1.8055, + "step": 196 + }, + { + "epoch": 0.08782880071333037, + "grad_norm": 0.2690058648586273, + "learning_rate": 5.2533333333333334e-05, + "loss": 1.8054, + "step": 197 + }, + { + "epoch": 0.08827463218903255, + "grad_norm": 0.28274258971214294, + "learning_rate": 5.280000000000001e-05, + "loss": 1.7532, + "step": 198 + }, + { + "epoch": 0.08872046366473473, + "grad_norm": 0.2919791340827942, + "learning_rate": 5.306666666666667e-05, + "loss": 1.8258, + "step": 199 + }, + { + "epoch": 0.08916629514043692, + "grad_norm": 0.25825726985931396, + "learning_rate": 5.333333333333333e-05, + "loss": 1.7981, + "step": 200 + }, + { + "epoch": 0.0896121266161391, + "grad_norm": 0.26243504881858826, + "learning_rate": 5.360000000000001e-05, + "loss": 1.8343, + "step": 201 + }, + { + "epoch": 0.09005795809184128, + "grad_norm": 0.2816524803638458, + "learning_rate": 5.386666666666667e-05, + "loss": 1.7426, + "step": 202 + }, + { + "epoch": 0.09050378956754347, + "grad_norm": 0.29419633746147156, + "learning_rate": 5.413333333333333e-05, + "loss": 1.7996, + "step": 203 + }, + { + "epoch": 0.09094962104324565, + "grad_norm": 0.28757423162460327, + "learning_rate": 5.440000000000001e-05, + "loss": 1.7916, + "step": 204 + }, + { + "epoch": 0.09139545251894783, + "grad_norm": 0.2410411238670349, + "learning_rate": 5.466666666666667e-05, + "loss": 1.743, + "step": 205 + }, + { + "epoch": 0.09184128399465002, + "grad_norm": 0.3360425531864166, + "learning_rate": 5.493333333333334e-05, + "loss": 1.77, + "step": 206 + }, + { + "epoch": 0.0922871154703522, + "grad_norm": 0.25519052147865295, + "learning_rate": 5.52e-05, + "loss": 1.8308, + "step": 207 + }, + { + "epoch": 0.0927329469460544, + "grad_norm": 0.3632420003414154, + "learning_rate": 5.5466666666666675e-05, + "loss": 1.8317, + "step": 208 + }, + { + "epoch": 0.09317877842175658, + "grad_norm": 0.24667537212371826, + "learning_rate": 5.573333333333334e-05, + "loss": 1.7549, + "step": 209 + }, + { + "epoch": 0.09362460989745876, + "grad_norm": 0.3362181782722473, + "learning_rate": 5.6e-05, + "loss": 1.7526, + "step": 210 + }, + { + "epoch": 0.09407044137316095, + "grad_norm": 0.25742268562316895, + "learning_rate": 5.6266666666666675e-05, + "loss": 1.817, + "step": 211 + }, + { + "epoch": 0.09451627284886313, + "grad_norm": 
0.3430984318256378, + "learning_rate": 5.6533333333333336e-05, + "loss": 1.7939, + "step": 212 + }, + { + "epoch": 0.09496210432456531, + "grad_norm": 0.2712782919406891, + "learning_rate": 5.6800000000000005e-05, + "loss": 1.7742, + "step": 213 + }, + { + "epoch": 0.0954079358002675, + "grad_norm": 0.29844462871551514, + "learning_rate": 5.7066666666666674e-05, + "loss": 1.7925, + "step": 214 + }, + { + "epoch": 0.09585376727596968, + "grad_norm": 0.2859255075454712, + "learning_rate": 5.7333333333333336e-05, + "loss": 1.8115, + "step": 215 + }, + { + "epoch": 0.09629959875167186, + "grad_norm": 0.2815805971622467, + "learning_rate": 5.7600000000000004e-05, + "loss": 1.7645, + "step": 216 + }, + { + "epoch": 0.09674543022737406, + "grad_norm": 0.25476354360580444, + "learning_rate": 5.786666666666667e-05, + "loss": 1.7237, + "step": 217 + }, + { + "epoch": 0.09719126170307624, + "grad_norm": 0.25315555930137634, + "learning_rate": 5.813333333333334e-05, + "loss": 1.8144, + "step": 218 + }, + { + "epoch": 0.09763709317877842, + "grad_norm": 0.3066805899143219, + "learning_rate": 5.84e-05, + "loss": 1.7694, + "step": 219 + }, + { + "epoch": 0.09808292465448061, + "grad_norm": 0.2467024177312851, + "learning_rate": 5.8666666666666665e-05, + "loss": 1.7698, + "step": 220 + }, + { + "epoch": 0.09852875613018279, + "grad_norm": 0.3097245395183563, + "learning_rate": 5.893333333333334e-05, + "loss": 1.731, + "step": 221 + }, + { + "epoch": 0.09897458760588497, + "grad_norm": 0.24703380465507507, + "learning_rate": 5.92e-05, + "loss": 1.8203, + "step": 222 + }, + { + "epoch": 0.09942041908158716, + "grad_norm": 0.251916766166687, + "learning_rate": 5.946666666666667e-05, + "loss": 1.7469, + "step": 223 + }, + { + "epoch": 0.09986625055728934, + "grad_norm": 0.2684166133403778, + "learning_rate": 5.973333333333334e-05, + "loss": 1.8091, + "step": 224 + }, + { + "epoch": 0.10031208203299152, + "grad_norm": 0.2632969319820404, + "learning_rate": 6.000000000000001e-05, + "loss": 1.7271, + "step": 225 + }, + { + "epoch": 0.10075791350869372, + "grad_norm": 0.2678864002227783, + "learning_rate": 6.026666666666667e-05, + "loss": 1.7441, + "step": 226 + }, + { + "epoch": 0.1012037449843959, + "grad_norm": 0.2720136046409607, + "learning_rate": 6.053333333333334e-05, + "loss": 1.7874, + "step": 227 + }, + { + "epoch": 0.10164957646009808, + "grad_norm": 0.3209216892719269, + "learning_rate": 6.080000000000001e-05, + "loss": 1.7705, + "step": 228 + }, + { + "epoch": 0.10209540793580027, + "grad_norm": 0.3064902126789093, + "learning_rate": 6.106666666666667e-05, + "loss": 1.8002, + "step": 229 + }, + { + "epoch": 0.10254123941150245, + "grad_norm": 0.31737932562828064, + "learning_rate": 6.133333333333334e-05, + "loss": 1.7382, + "step": 230 + }, + { + "epoch": 0.10298707088720464, + "grad_norm": 0.3056716322898865, + "learning_rate": 6.16e-05, + "loss": 1.8704, + "step": 231 + }, + { + "epoch": 0.10343290236290682, + "grad_norm": 0.29310256242752075, + "learning_rate": 6.186666666666668e-05, + "loss": 1.8472, + "step": 232 + }, + { + "epoch": 0.103878733838609, + "grad_norm": 0.2623426914215088, + "learning_rate": 6.213333333333333e-05, + "loss": 1.8431, + "step": 233 + }, + { + "epoch": 0.1043245653143112, + "grad_norm": 0.35625219345092773, + "learning_rate": 6.240000000000001e-05, + "loss": 1.8601, + "step": 234 + }, + { + "epoch": 0.10477039679001338, + "grad_norm": 0.2834986746311188, + "learning_rate": 6.266666666666667e-05, + "loss": 1.8366, + "step": 235 + }, + { + "epoch": 0.10521622826571556, + 
"grad_norm": 0.2918021082878113, + "learning_rate": 6.293333333333334e-05, + "loss": 1.828, + "step": 236 + }, + { + "epoch": 0.10566205974141775, + "grad_norm": 0.23207345604896545, + "learning_rate": 6.32e-05, + "loss": 1.6808, + "step": 237 + }, + { + "epoch": 0.10610789121711993, + "grad_norm": 0.28747454285621643, + "learning_rate": 6.346666666666667e-05, + "loss": 1.8516, + "step": 238 + }, + { + "epoch": 0.10655372269282211, + "grad_norm": 0.2337469756603241, + "learning_rate": 6.373333333333334e-05, + "loss": 1.7969, + "step": 239 + }, + { + "epoch": 0.1069995541685243, + "grad_norm": 0.2963745892047882, + "learning_rate": 6.400000000000001e-05, + "loss": 1.7612, + "step": 240 + }, + { + "epoch": 0.10744538564422648, + "grad_norm": 0.2667463719844818, + "learning_rate": 6.426666666666668e-05, + "loss": 1.6857, + "step": 241 + }, + { + "epoch": 0.10789121711992866, + "grad_norm": 0.27753686904907227, + "learning_rate": 6.453333333333333e-05, + "loss": 1.8426, + "step": 242 + }, + { + "epoch": 0.10833704859563086, + "grad_norm": 0.23864240944385529, + "learning_rate": 6.48e-05, + "loss": 1.7344, + "step": 243 + }, + { + "epoch": 0.10878288007133304, + "grad_norm": 0.28611189126968384, + "learning_rate": 6.506666666666667e-05, + "loss": 1.8943, + "step": 244 + }, + { + "epoch": 0.10922871154703521, + "grad_norm": 0.26439395546913147, + "learning_rate": 6.533333333333334e-05, + "loss": 1.853, + "step": 245 + }, + { + "epoch": 0.10967454302273741, + "grad_norm": 0.24167154729366302, + "learning_rate": 6.56e-05, + "loss": 1.7802, + "step": 246 + }, + { + "epoch": 0.11012037449843959, + "grad_norm": 0.2444562166929245, + "learning_rate": 6.586666666666668e-05, + "loss": 1.7193, + "step": 247 + }, + { + "epoch": 0.11056620597414177, + "grad_norm": 0.298610657453537, + "learning_rate": 6.613333333333333e-05, + "loss": 1.784, + "step": 248 + }, + { + "epoch": 0.11101203744984396, + "grad_norm": 0.23441357910633087, + "learning_rate": 6.64e-05, + "loss": 1.7624, + "step": 249 + }, + { + "epoch": 0.11145786892554614, + "grad_norm": 0.2940090000629425, + "learning_rate": 6.666666666666667e-05, + "loss": 1.8419, + "step": 250 + }, + { + "epoch": 0.11190370040124833, + "grad_norm": 0.2812560200691223, + "learning_rate": 6.693333333333334e-05, + "loss": 1.8512, + "step": 251 + }, + { + "epoch": 0.11234953187695051, + "grad_norm": 0.24187949299812317, + "learning_rate": 6.720000000000001e-05, + "loss": 1.7801, + "step": 252 + }, + { + "epoch": 0.1127953633526527, + "grad_norm": 0.2625133693218231, + "learning_rate": 6.746666666666668e-05, + "loss": 1.8104, + "step": 253 + }, + { + "epoch": 0.11324119482835489, + "grad_norm": 0.22561997175216675, + "learning_rate": 6.773333333333334e-05, + "loss": 1.809, + "step": 254 + }, + { + "epoch": 0.11368702630405707, + "grad_norm": 0.2781251072883606, + "learning_rate": 6.8e-05, + "loss": 1.81, + "step": 255 + }, + { + "epoch": 0.11413285777975925, + "grad_norm": 0.28370434045791626, + "learning_rate": 6.826666666666668e-05, + "loss": 1.7855, + "step": 256 + }, + { + "epoch": 0.11457868925546144, + "grad_norm": 0.2266968935728073, + "learning_rate": 6.853333333333334e-05, + "loss": 1.7562, + "step": 257 + }, + { + "epoch": 0.11502452073116362, + "grad_norm": 0.2713560163974762, + "learning_rate": 6.88e-05, + "loss": 1.7536, + "step": 258 + }, + { + "epoch": 0.1154703522068658, + "grad_norm": 0.24759072065353394, + "learning_rate": 6.906666666666667e-05, + "loss": 1.8368, + "step": 259 + }, + { + "epoch": 0.115916183682568, + "grad_norm": 0.24621137976646423, 
+ "learning_rate": 6.933333333333334e-05, + "loss": 1.6647, + "step": 260 + }, + { + "epoch": 0.11636201515827017, + "grad_norm": 0.24362999200820923, + "learning_rate": 6.960000000000001e-05, + "loss": 1.7761, + "step": 261 + }, + { + "epoch": 0.11680784663397235, + "grad_norm": 0.2415657788515091, + "learning_rate": 6.986666666666667e-05, + "loss": 1.8489, + "step": 262 + }, + { + "epoch": 0.11725367810967455, + "grad_norm": 0.22340714931488037, + "learning_rate": 7.013333333333335e-05, + "loss": 1.7276, + "step": 263 + }, + { + "epoch": 0.11769950958537673, + "grad_norm": 0.24080178141593933, + "learning_rate": 7.04e-05, + "loss": 1.7753, + "step": 264 + }, + { + "epoch": 0.1181453410610789, + "grad_norm": 0.22115445137023926, + "learning_rate": 7.066666666666667e-05, + "loss": 1.7597, + "step": 265 + }, + { + "epoch": 0.1185911725367811, + "grad_norm": 0.22792184352874756, + "learning_rate": 7.093333333333334e-05, + "loss": 1.7139, + "step": 266 + }, + { + "epoch": 0.11903700401248328, + "grad_norm": 0.2419521063566208, + "learning_rate": 7.120000000000001e-05, + "loss": 1.7849, + "step": 267 + }, + { + "epoch": 0.11948283548818546, + "grad_norm": 0.24347807466983795, + "learning_rate": 7.146666666666666e-05, + "loss": 1.7569, + "step": 268 + }, + { + "epoch": 0.11992866696388765, + "grad_norm": 0.24143561720848083, + "learning_rate": 7.173333333333333e-05, + "loss": 1.8188, + "step": 269 + }, + { + "epoch": 0.12037449843958983, + "grad_norm": 0.30590054392814636, + "learning_rate": 7.2e-05, + "loss": 1.8558, + "step": 270 + }, + { + "epoch": 0.12082032991529201, + "grad_norm": 0.2658385634422302, + "learning_rate": 7.226666666666667e-05, + "loss": 1.8763, + "step": 271 + }, + { + "epoch": 0.1212661613909942, + "grad_norm": 0.24798420071601868, + "learning_rate": 7.253333333333334e-05, + "loss": 1.8042, + "step": 272 + }, + { + "epoch": 0.12171199286669639, + "grad_norm": 0.28150761127471924, + "learning_rate": 7.280000000000001e-05, + "loss": 1.8033, + "step": 273 + }, + { + "epoch": 0.12215782434239858, + "grad_norm": 0.2389529049396515, + "learning_rate": 7.306666666666668e-05, + "loss": 1.8046, + "step": 274 + }, + { + "epoch": 0.12260365581810076, + "grad_norm": 0.24898123741149902, + "learning_rate": 7.333333333333333e-05, + "loss": 1.7515, + "step": 275 + }, + { + "epoch": 0.12304948729380294, + "grad_norm": 0.24504360556602478, + "learning_rate": 7.360000000000001e-05, + "loss": 1.7288, + "step": 276 + }, + { + "epoch": 0.12349531876950513, + "grad_norm": 0.3016360104084015, + "learning_rate": 7.386666666666667e-05, + "loss": 1.9285, + "step": 277 + }, + { + "epoch": 0.12394115024520731, + "grad_norm": 0.23023012280464172, + "learning_rate": 7.413333333333334e-05, + "loss": 1.7739, + "step": 278 + }, + { + "epoch": 0.12438698172090949, + "grad_norm": 0.26158833503723145, + "learning_rate": 7.44e-05, + "loss": 1.751, + "step": 279 + }, + { + "epoch": 0.12483281319661169, + "grad_norm": 0.2571043372154236, + "learning_rate": 7.466666666666667e-05, + "loss": 1.8096, + "step": 280 + }, + { + "epoch": 0.12527864467231387, + "grad_norm": 0.2688402831554413, + "learning_rate": 7.493333333333334e-05, + "loss": 1.9102, + "step": 281 + }, + { + "epoch": 0.12572447614801605, + "grad_norm": 0.23819898068904877, + "learning_rate": 7.52e-05, + "loss": 1.7089, + "step": 282 + }, + { + "epoch": 0.12617030762371823, + "grad_norm": 0.2830815315246582, + "learning_rate": 7.546666666666668e-05, + "loss": 1.7647, + "step": 283 + }, + { + "epoch": 0.12661613909942043, + "grad_norm": 
0.23446160554885864, + "learning_rate": 7.573333333333334e-05, + "loss": 1.7359, + "step": 284 + }, + { + "epoch": 0.1270619705751226, + "grad_norm": 0.2588684856891632, + "learning_rate": 7.6e-05, + "loss": 1.7247, + "step": 285 + }, + { + "epoch": 0.1275078020508248, + "grad_norm": 0.2301066815853119, + "learning_rate": 7.626666666666667e-05, + "loss": 1.8099, + "step": 286 + }, + { + "epoch": 0.12795363352652697, + "grad_norm": 0.2484956830739975, + "learning_rate": 7.653333333333334e-05, + "loss": 1.7122, + "step": 287 + }, + { + "epoch": 0.12839946500222915, + "grad_norm": 0.22871418297290802, + "learning_rate": 7.680000000000001e-05, + "loss": 1.7922, + "step": 288 + }, + { + "epoch": 0.12884529647793133, + "grad_norm": 0.23120200634002686, + "learning_rate": 7.706666666666668e-05, + "loss": 1.7302, + "step": 289 + }, + { + "epoch": 0.12929112795363354, + "grad_norm": 0.24377760291099548, + "learning_rate": 7.733333333333333e-05, + "loss": 1.7644, + "step": 290 + }, + { + "epoch": 0.12973695942933572, + "grad_norm": 0.27873891592025757, + "learning_rate": 7.76e-05, + "loss": 1.7708, + "step": 291 + }, + { + "epoch": 0.1301827909050379, + "grad_norm": 0.23908622562885284, + "learning_rate": 7.786666666666667e-05, + "loss": 1.8235, + "step": 292 + }, + { + "epoch": 0.13062862238074008, + "grad_norm": 0.2732592821121216, + "learning_rate": 7.813333333333334e-05, + "loss": 1.7909, + "step": 293 + }, + { + "epoch": 0.13107445385644226, + "grad_norm": 0.2262636423110962, + "learning_rate": 7.840000000000001e-05, + "loss": 1.7058, + "step": 294 + }, + { + "epoch": 0.13152028533214444, + "grad_norm": 0.251850962638855, + "learning_rate": 7.866666666666666e-05, + "loss": 1.813, + "step": 295 + }, + { + "epoch": 0.13196611680784665, + "grad_norm": 0.2817717492580414, + "learning_rate": 7.893333333333335e-05, + "loss": 1.8096, + "step": 296 + }, + { + "epoch": 0.13241194828354882, + "grad_norm": 0.2282685786485672, + "learning_rate": 7.92e-05, + "loss": 1.6759, + "step": 297 + }, + { + "epoch": 0.132857779759251, + "grad_norm": 0.2728259265422821, + "learning_rate": 7.946666666666667e-05, + "loss": 1.7838, + "step": 298 + }, + { + "epoch": 0.13330361123495318, + "grad_norm": 0.27094462513923645, + "learning_rate": 7.973333333333334e-05, + "loss": 1.7412, + "step": 299 + }, + { + "epoch": 0.13374944271065536, + "grad_norm": 0.24353022873401642, + "learning_rate": 8e-05, + "loss": 1.7562, + "step": 300 + }, + { + "epoch": 0.13419527418635754, + "grad_norm": 0.26686304807662964, + "learning_rate": 7.999994771416534e-05, + "loss": 1.8088, + "step": 301 + }, + { + "epoch": 0.13464110566205975, + "grad_norm": 0.23156635463237762, + "learning_rate": 7.999979085679805e-05, + "loss": 1.6609, + "step": 302 + }, + { + "epoch": 0.13508693713776193, + "grad_norm": 0.21843333542346954, + "learning_rate": 7.99995294283082e-05, + "loss": 1.7144, + "step": 303 + }, + { + "epoch": 0.1355327686134641, + "grad_norm": 0.23980362713336945, + "learning_rate": 7.999916342937923e-05, + "loss": 1.7998, + "step": 304 + }, + { + "epoch": 0.1359786000891663, + "grad_norm": 0.23711195588111877, + "learning_rate": 7.999869286096798e-05, + "loss": 1.8144, + "step": 305 + }, + { + "epoch": 0.13642443156486847, + "grad_norm": 0.2704579830169678, + "learning_rate": 7.999811772430466e-05, + "loss": 1.7427, + "step": 306 + }, + { + "epoch": 0.13687026304057068, + "grad_norm": 0.23453868925571442, + "learning_rate": 7.999743802089283e-05, + "loss": 1.6939, + "step": 307 + }, + { + "epoch": 0.13731609451627286, + "grad_norm": 
0.21298548579216003, + "learning_rate": 7.999665375250944e-05, + "loss": 1.7517, + "step": 308 + }, + { + "epoch": 0.13776192599197504, + "grad_norm": 0.28733065724372864, + "learning_rate": 7.99957649212048e-05, + "loss": 1.7647, + "step": 309 + }, + { + "epoch": 0.13820775746767722, + "grad_norm": 0.2919608950614929, + "learning_rate": 7.999477152930256e-05, + "loss": 1.7727, + "step": 310 + }, + { + "epoch": 0.1386535889433794, + "grad_norm": 0.28628838062286377, + "learning_rate": 7.999367357939974e-05, + "loss": 1.8061, + "step": 311 + }, + { + "epoch": 0.13909942041908158, + "grad_norm": 0.2606216371059418, + "learning_rate": 7.999247107436672e-05, + "loss": 1.7097, + "step": 312 + }, + { + "epoch": 0.13954525189478378, + "grad_norm": 0.22490161657333374, + "learning_rate": 7.999116401734718e-05, + "loss": 1.7861, + "step": 313 + }, + { + "epoch": 0.13999108337048596, + "grad_norm": 0.287057489156723, + "learning_rate": 7.998975241175815e-05, + "loss": 1.7041, + "step": 314 + }, + { + "epoch": 0.14043691484618814, + "grad_norm": 0.22429852187633514, + "learning_rate": 7.998823626129e-05, + "loss": 1.7224, + "step": 315 + }, + { + "epoch": 0.14088274632189032, + "grad_norm": 0.23749588429927826, + "learning_rate": 7.998661556990636e-05, + "loss": 1.7259, + "step": 316 + }, + { + "epoch": 0.1413285777975925, + "grad_norm": 0.2275572419166565, + "learning_rate": 7.99848903418442e-05, + "loss": 1.7777, + "step": 317 + }, + { + "epoch": 0.14177440927329468, + "grad_norm": 0.21902650594711304, + "learning_rate": 7.998306058161378e-05, + "loss": 1.7444, + "step": 318 + }, + { + "epoch": 0.1422202407489969, + "grad_norm": 0.2763417959213257, + "learning_rate": 7.998112629399861e-05, + "loss": 1.8482, + "step": 319 + }, + { + "epoch": 0.14266607222469907, + "grad_norm": 0.2526148557662964, + "learning_rate": 7.99790874840555e-05, + "loss": 1.8765, + "step": 320 + }, + { + "epoch": 0.14311190370040125, + "grad_norm": 0.22051188349723816, + "learning_rate": 7.997694415711448e-05, + "loss": 1.7598, + "step": 321 + }, + { + "epoch": 0.14355773517610343, + "grad_norm": 0.23414510488510132, + "learning_rate": 7.997469631877883e-05, + "loss": 1.701, + "step": 322 + }, + { + "epoch": 0.1440035666518056, + "grad_norm": 0.22807630896568298, + "learning_rate": 7.997234397492508e-05, + "loss": 1.8516, + "step": 323 + }, + { + "epoch": 0.1444493981275078, + "grad_norm": 0.2263704240322113, + "learning_rate": 7.996988713170292e-05, + "loss": 1.7997, + "step": 324 + }, + { + "epoch": 0.14489522960321, + "grad_norm": 0.24178504943847656, + "learning_rate": 7.996732579553526e-05, + "loss": 1.7961, + "step": 325 + }, + { + "epoch": 0.14534106107891218, + "grad_norm": 0.2487363964319229, + "learning_rate": 7.996465997311817e-05, + "loss": 1.7727, + "step": 326 + }, + { + "epoch": 0.14578689255461436, + "grad_norm": 0.23811641335487366, + "learning_rate": 7.996188967142091e-05, + "loss": 1.8489, + "step": 327 + }, + { + "epoch": 0.14623272403031654, + "grad_norm": 0.2284778505563736, + "learning_rate": 7.995901489768584e-05, + "loss": 1.7402, + "step": 328 + }, + { + "epoch": 0.14667855550601872, + "grad_norm": 0.24267128109931946, + "learning_rate": 7.995603565942846e-05, + "loss": 1.6906, + "step": 329 + }, + { + "epoch": 0.14712438698172092, + "grad_norm": 0.2633998394012451, + "learning_rate": 7.995295196443737e-05, + "loss": 1.8315, + "step": 330 + }, + { + "epoch": 0.1475702184574231, + "grad_norm": 0.23922331631183624, + "learning_rate": 7.994976382077425e-05, + "loss": 1.7517, + "step": 331 + }, + { + 
"epoch": 0.14801604993312528, + "grad_norm": 0.2259657233953476, + "learning_rate": 7.994647123677384e-05, + "loss": 1.7938, + "step": 332 + }, + { + "epoch": 0.14846188140882746, + "grad_norm": 0.2336091250181198, + "learning_rate": 7.99430742210439e-05, + "loss": 1.7878, + "step": 333 + }, + { + "epoch": 0.14890771288452964, + "grad_norm": 0.21656788885593414, + "learning_rate": 7.993957278246525e-05, + "loss": 1.7411, + "step": 334 + }, + { + "epoch": 0.14935354436023182, + "grad_norm": 0.22250677645206451, + "learning_rate": 7.993596693019165e-05, + "loss": 1.8132, + "step": 335 + }, + { + "epoch": 0.14979937583593403, + "grad_norm": 0.258594274520874, + "learning_rate": 7.993225667364984e-05, + "loss": 1.7314, + "step": 336 + }, + { + "epoch": 0.1502452073116362, + "grad_norm": 0.2388758510351181, + "learning_rate": 7.992844202253953e-05, + "loss": 1.7738, + "step": 337 + }, + { + "epoch": 0.1506910387873384, + "grad_norm": 0.26715052127838135, + "learning_rate": 7.992452298683333e-05, + "loss": 1.7666, + "step": 338 + }, + { + "epoch": 0.15113687026304057, + "grad_norm": 0.22846515476703644, + "learning_rate": 7.992049957677674e-05, + "loss": 1.7664, + "step": 339 + }, + { + "epoch": 0.15158270173874275, + "grad_norm": 0.23906578123569489, + "learning_rate": 7.991637180288813e-05, + "loss": 1.8869, + "step": 340 + }, + { + "epoch": 0.15202853321444493, + "grad_norm": 0.24840490520000458, + "learning_rate": 7.99121396759587e-05, + "loss": 1.707, + "step": 341 + }, + { + "epoch": 0.15247436469014714, + "grad_norm": 0.23409156501293182, + "learning_rate": 7.990780320705246e-05, + "loss": 1.8279, + "step": 342 + }, + { + "epoch": 0.15292019616584931, + "grad_norm": 0.23515836894512177, + "learning_rate": 7.990336240750621e-05, + "loss": 1.7601, + "step": 343 + }, + { + "epoch": 0.1533660276415515, + "grad_norm": 0.2411307990550995, + "learning_rate": 7.98988172889295e-05, + "loss": 1.8165, + "step": 344 + }, + { + "epoch": 0.15381185911725367, + "grad_norm": 0.2911078631877899, + "learning_rate": 7.98941678632046e-05, + "loss": 1.7544, + "step": 345 + }, + { + "epoch": 0.15425769059295585, + "grad_norm": 0.23404845595359802, + "learning_rate": 7.988941414248645e-05, + "loss": 1.8192, + "step": 346 + }, + { + "epoch": 0.15470352206865803, + "grad_norm": 0.274189829826355, + "learning_rate": 7.988455613920267e-05, + "loss": 1.8439, + "step": 347 + }, + { + "epoch": 0.15514935354436024, + "grad_norm": 0.21792374551296234, + "learning_rate": 7.98795938660535e-05, + "loss": 1.8227, + "step": 348 + }, + { + "epoch": 0.15559518502006242, + "grad_norm": 0.27858689427375793, + "learning_rate": 7.987452733601179e-05, + "loss": 1.77, + "step": 349 + }, + { + "epoch": 0.1560410164957646, + "grad_norm": 0.22310331463813782, + "learning_rate": 7.986935656232287e-05, + "loss": 1.763, + "step": 350 + }, + { + "epoch": 0.15648684797146678, + "grad_norm": 0.22564561665058136, + "learning_rate": 7.98640815585047e-05, + "loss": 1.7049, + "step": 351 + }, + { + "epoch": 0.15693267944716896, + "grad_norm": 0.2216060310602188, + "learning_rate": 7.985870233834767e-05, + "loss": 1.7523, + "step": 352 + }, + { + "epoch": 0.15737851092287117, + "grad_norm": 0.23628509044647217, + "learning_rate": 7.985321891591463e-05, + "loss": 1.7974, + "step": 353 + }, + { + "epoch": 0.15782434239857335, + "grad_norm": 0.21217618882656097, + "learning_rate": 7.984763130554084e-05, + "loss": 1.7335, + "step": 354 + }, + { + "epoch": 0.15827017387427553, + "grad_norm": 0.2183927595615387, + "learning_rate": 
7.984193952183394e-05, + "loss": 1.822, + "step": 355 + }, + { + "epoch": 0.1587160053499777, + "grad_norm": 0.22199095785617828, + "learning_rate": 7.983614357967392e-05, + "loss": 1.7597, + "step": 356 + }, + { + "epoch": 0.1591618368256799, + "grad_norm": 0.20965681970119476, + "learning_rate": 7.983024349421307e-05, + "loss": 1.7823, + "step": 357 + }, + { + "epoch": 0.15960766830138207, + "grad_norm": 0.21876397728919983, + "learning_rate": 7.982423928087593e-05, + "loss": 1.8127, + "step": 358 + }, + { + "epoch": 0.16005349977708427, + "grad_norm": 0.21204975247383118, + "learning_rate": 7.981813095535926e-05, + "loss": 1.69, + "step": 359 + }, + { + "epoch": 0.16049933125278645, + "grad_norm": 0.2185240387916565, + "learning_rate": 7.981191853363201e-05, + "loss": 1.8001, + "step": 360 + }, + { + "epoch": 0.16094516272848863, + "grad_norm": 0.23309868574142456, + "learning_rate": 7.980560203193525e-05, + "loss": 1.7638, + "step": 361 + }, + { + "epoch": 0.1613909942041908, + "grad_norm": 0.21918439865112305, + "learning_rate": 7.979918146678218e-05, + "loss": 1.7739, + "step": 362 + }, + { + "epoch": 0.161836825679893, + "grad_norm": 0.2592524588108063, + "learning_rate": 7.979265685495801e-05, + "loss": 1.7639, + "step": 363 + }, + { + "epoch": 0.16228265715559517, + "grad_norm": 0.21176396310329437, + "learning_rate": 7.978602821351999e-05, + "loss": 1.7888, + "step": 364 + }, + { + "epoch": 0.16272848863129738, + "grad_norm": 0.22291821241378784, + "learning_rate": 7.977929555979732e-05, + "loss": 1.6619, + "step": 365 + }, + { + "epoch": 0.16317432010699956, + "grad_norm": 0.21604986488819122, + "learning_rate": 7.977245891139113e-05, + "loss": 1.7488, + "step": 366 + }, + { + "epoch": 0.16362015158270174, + "grad_norm": 0.22686690092086792, + "learning_rate": 7.976551828617439e-05, + "loss": 1.8615, + "step": 367 + }, + { + "epoch": 0.16406598305840392, + "grad_norm": 0.2151118963956833, + "learning_rate": 7.975847370229194e-05, + "loss": 1.79, + "step": 368 + }, + { + "epoch": 0.1645118145341061, + "grad_norm": 0.21530570089817047, + "learning_rate": 7.975132517816038e-05, + "loss": 1.7706, + "step": 369 + }, + { + "epoch": 0.16495764600980828, + "grad_norm": 0.21484266221523285, + "learning_rate": 7.974407273246801e-05, + "loss": 1.8433, + "step": 370 + }, + { + "epoch": 0.1654034774855105, + "grad_norm": 0.2247444987297058, + "learning_rate": 7.973671638417488e-05, + "loss": 1.8342, + "step": 371 + }, + { + "epoch": 0.16584930896121267, + "grad_norm": 0.21698202192783356, + "learning_rate": 7.97292561525126e-05, + "loss": 1.7795, + "step": 372 + }, + { + "epoch": 0.16629514043691485, + "grad_norm": 0.21239005029201508, + "learning_rate": 7.97216920569844e-05, + "loss": 1.7897, + "step": 373 + }, + { + "epoch": 0.16674097191261703, + "grad_norm": 0.2313557267189026, + "learning_rate": 7.971402411736502e-05, + "loss": 1.7493, + "step": 374 + }, + { + "epoch": 0.1671868033883192, + "grad_norm": 0.2117127925157547, + "learning_rate": 7.97062523537007e-05, + "loss": 1.7789, + "step": 375 + }, + { + "epoch": 0.1676326348640214, + "grad_norm": 0.21416178345680237, + "learning_rate": 7.969837678630912e-05, + "loss": 1.7463, + "step": 376 + }, + { + "epoch": 0.1680784663397236, + "grad_norm": 0.2061980962753296, + "learning_rate": 7.969039743577929e-05, + "loss": 1.6982, + "step": 377 + }, + { + "epoch": 0.16852429781542577, + "grad_norm": 0.21651746332645416, + "learning_rate": 7.968231432297157e-05, + "loss": 1.7723, + "step": 378 + }, + { + "epoch": 0.16897012929112795, + 
"grad_norm": 0.24461179971694946, + "learning_rate": 7.967412746901754e-05, + "loss": 1.8304, + "step": 379 + }, + { + "epoch": 0.16941596076683013, + "grad_norm": 0.2126745879650116, + "learning_rate": 7.966583689532007e-05, + "loss": 1.7447, + "step": 380 + }, + { + "epoch": 0.1698617922425323, + "grad_norm": 0.24304448068141937, + "learning_rate": 7.965744262355313e-05, + "loss": 1.7735, + "step": 381 + }, + { + "epoch": 0.17030762371823452, + "grad_norm": 0.22968406975269318, + "learning_rate": 7.964894467566177e-05, + "loss": 1.7358, + "step": 382 + }, + { + "epoch": 0.1707534551939367, + "grad_norm": 0.20871512591838837, + "learning_rate": 7.964034307386211e-05, + "loss": 1.7411, + "step": 383 + }, + { + "epoch": 0.17119928666963888, + "grad_norm": 0.21672800183296204, + "learning_rate": 7.963163784064128e-05, + "loss": 1.6548, + "step": 384 + }, + { + "epoch": 0.17164511814534106, + "grad_norm": 0.20464417338371277, + "learning_rate": 7.962282899875727e-05, + "loss": 1.7081, + "step": 385 + }, + { + "epoch": 0.17209094962104324, + "grad_norm": 0.2229240983724594, + "learning_rate": 7.961391657123896e-05, + "loss": 1.7655, + "step": 386 + }, + { + "epoch": 0.17253678109674542, + "grad_norm": 0.21150527894496918, + "learning_rate": 7.960490058138604e-05, + "loss": 1.7853, + "step": 387 + }, + { + "epoch": 0.17298261257244762, + "grad_norm": 0.21915920078754425, + "learning_rate": 7.959578105276896e-05, + "loss": 1.7652, + "step": 388 + }, + { + "epoch": 0.1734284440481498, + "grad_norm": 0.24295909702777863, + "learning_rate": 7.95865580092288e-05, + "loss": 1.8354, + "step": 389 + }, + { + "epoch": 0.17387427552385198, + "grad_norm": 0.20863685011863708, + "learning_rate": 7.957723147487728e-05, + "loss": 1.7122, + "step": 390 + }, + { + "epoch": 0.17432010699955416, + "grad_norm": 0.2294648438692093, + "learning_rate": 7.956780147409672e-05, + "loss": 1.7097, + "step": 391 + }, + { + "epoch": 0.17476593847525634, + "grad_norm": 0.21071994304656982, + "learning_rate": 7.955826803153986e-05, + "loss": 1.8451, + "step": 392 + }, + { + "epoch": 0.17521176995095855, + "grad_norm": 0.2508149743080139, + "learning_rate": 7.954863117212992e-05, + "loss": 1.8128, + "step": 393 + }, + { + "epoch": 0.17565760142666073, + "grad_norm": 0.21691708266735077, + "learning_rate": 7.953889092106045e-05, + "loss": 1.718, + "step": 394 + }, + { + "epoch": 0.1761034329023629, + "grad_norm": 0.2262207269668579, + "learning_rate": 7.952904730379531e-05, + "loss": 1.7128, + "step": 395 + }, + { + "epoch": 0.1765492643780651, + "grad_norm": 0.24705760180950165, + "learning_rate": 7.951910034606858e-05, + "loss": 1.7341, + "step": 396 + }, + { + "epoch": 0.17699509585376727, + "grad_norm": 0.2729456126689911, + "learning_rate": 7.950905007388453e-05, + "loss": 1.7499, + "step": 397 + }, + { + "epoch": 0.17744092732946945, + "grad_norm": 0.21897609531879425, + "learning_rate": 7.949889651351749e-05, + "loss": 1.765, + "step": 398 + }, + { + "epoch": 0.17788675880517166, + "grad_norm": 0.289559006690979, + "learning_rate": 7.948863969151182e-05, + "loss": 1.7466, + "step": 399 + }, + { + "epoch": 0.17833259028087384, + "grad_norm": 0.20668886601924896, + "learning_rate": 7.947827963468187e-05, + "loss": 1.817, + "step": 400 + }, + { + "epoch": 0.17877842175657602, + "grad_norm": 0.26898351311683655, + "learning_rate": 7.946781637011181e-05, + "loss": 1.8633, + "step": 401 + }, + { + "epoch": 0.1792242532322782, + "grad_norm": 0.21890974044799805, + "learning_rate": 7.945724992515573e-05, + "loss": 1.7948, + 
"step": 402 + }, + { + "epoch": 0.17967008470798038, + "grad_norm": 0.220897376537323, + "learning_rate": 7.944658032743733e-05, + "loss": 1.7348, + "step": 403 + }, + { + "epoch": 0.18011591618368256, + "grad_norm": 0.22162188589572906, + "learning_rate": 7.94358076048501e-05, + "loss": 1.7777, + "step": 404 + }, + { + "epoch": 0.18056174765938476, + "grad_norm": 0.23272553086280823, + "learning_rate": 7.942493178555705e-05, + "loss": 1.8239, + "step": 405 + }, + { + "epoch": 0.18100757913508694, + "grad_norm": 0.2248050570487976, + "learning_rate": 7.941395289799076e-05, + "loss": 1.7756, + "step": 406 + }, + { + "epoch": 0.18145341061078912, + "grad_norm": 0.22719000279903412, + "learning_rate": 7.940287097085324e-05, + "loss": 1.6338, + "step": 407 + }, + { + "epoch": 0.1818992420864913, + "grad_norm": 0.21583989262580872, + "learning_rate": 7.939168603311591e-05, + "loss": 1.7401, + "step": 408 + }, + { + "epoch": 0.18234507356219348, + "grad_norm": 0.21622613072395325, + "learning_rate": 7.93803981140194e-05, + "loss": 1.7647, + "step": 409 + }, + { + "epoch": 0.18279090503789566, + "grad_norm": 0.22838523983955383, + "learning_rate": 7.936900724307366e-05, + "loss": 1.8249, + "step": 410 + }, + { + "epoch": 0.18323673651359787, + "grad_norm": 0.2663255035877228, + "learning_rate": 7.935751345005776e-05, + "loss": 1.8474, + "step": 411 + }, + { + "epoch": 0.18368256798930005, + "grad_norm": 0.20833280682563782, + "learning_rate": 7.93459167650198e-05, + "loss": 1.7485, + "step": 412 + }, + { + "epoch": 0.18412839946500223, + "grad_norm": 0.2584652006626129, + "learning_rate": 7.933421721827692e-05, + "loss": 1.7936, + "step": 413 + }, + { + "epoch": 0.1845742309407044, + "grad_norm": 0.21740388870239258, + "learning_rate": 7.932241484041512e-05, + "loss": 1.7765, + "step": 414 + }, + { + "epoch": 0.1850200624164066, + "grad_norm": 0.2323867678642273, + "learning_rate": 7.931050966228931e-05, + "loss": 1.7967, + "step": 415 + }, + { + "epoch": 0.1854658938921088, + "grad_norm": 0.21234050393104553, + "learning_rate": 7.929850171502304e-05, + "loss": 1.7576, + "step": 416 + }, + { + "epoch": 0.18591172536781098, + "grad_norm": 0.24422520399093628, + "learning_rate": 7.928639103000864e-05, + "loss": 1.7178, + "step": 417 + }, + { + "epoch": 0.18635755684351316, + "grad_norm": 0.20524504780769348, + "learning_rate": 7.927417763890693e-05, + "loss": 1.6605, + "step": 418 + }, + { + "epoch": 0.18680338831921534, + "grad_norm": 0.2728728652000427, + "learning_rate": 7.926186157364732e-05, + "loss": 1.7642, + "step": 419 + }, + { + "epoch": 0.18724921979491752, + "grad_norm": 0.2353711575269699, + "learning_rate": 7.924944286642754e-05, + "loss": 1.6901, + "step": 420 + }, + { + "epoch": 0.1876950512706197, + "grad_norm": 0.22307051718235016, + "learning_rate": 7.923692154971378e-05, + "loss": 1.7768, + "step": 421 + }, + { + "epoch": 0.1881408827463219, + "grad_norm": 0.20437411963939667, + "learning_rate": 7.922429765624037e-05, + "loss": 1.6671, + "step": 422 + }, + { + "epoch": 0.18858671422202408, + "grad_norm": 0.22397580742835999, + "learning_rate": 7.921157121900985e-05, + "loss": 1.8473, + "step": 423 + }, + { + "epoch": 0.18903254569772626, + "grad_norm": 0.27350035309791565, + "learning_rate": 7.919874227129286e-05, + "loss": 1.7446, + "step": 424 + }, + { + "epoch": 0.18947837717342844, + "grad_norm": 0.21913419663906097, + "learning_rate": 7.918581084662801e-05, + "loss": 1.8032, + "step": 425 + }, + { + "epoch": 0.18992420864913062, + "grad_norm": 0.2502184808254242, + 
"learning_rate": 7.917277697882181e-05, + "loss": 1.6962, + "step": 426 + }, + { + "epoch": 0.1903700401248328, + "grad_norm": 0.20700030028820038, + "learning_rate": 7.915964070194859e-05, + "loss": 1.7662, + "step": 427 + }, + { + "epoch": 0.190815871600535, + "grad_norm": 0.24439390003681183, + "learning_rate": 7.91464020503504e-05, + "loss": 1.7713, + "step": 428 + }, + { + "epoch": 0.1912617030762372, + "grad_norm": 0.2158062756061554, + "learning_rate": 7.913306105863698e-05, + "loss": 1.7541, + "step": 429 + }, + { + "epoch": 0.19170753455193937, + "grad_norm": 0.20007923245429993, + "learning_rate": 7.911961776168551e-05, + "loss": 1.7032, + "step": 430 + }, + { + "epoch": 0.19215336602764155, + "grad_norm": 0.219706729054451, + "learning_rate": 7.910607219464075e-05, + "loss": 1.8321, + "step": 431 + }, + { + "epoch": 0.19259919750334373, + "grad_norm": 0.22007739543914795, + "learning_rate": 7.909242439291474e-05, + "loss": 1.7962, + "step": 432 + }, + { + "epoch": 0.1930450289790459, + "grad_norm": 0.20321106910705566, + "learning_rate": 7.90786743921868e-05, + "loss": 1.6555, + "step": 433 + }, + { + "epoch": 0.19349086045474811, + "grad_norm": 0.22101330757141113, + "learning_rate": 7.906482222840347e-05, + "loss": 1.816, + "step": 434 + }, + { + "epoch": 0.1939366919304503, + "grad_norm": 0.21452194452285767, + "learning_rate": 7.905086793777834e-05, + "loss": 1.7222, + "step": 435 + }, + { + "epoch": 0.19438252340615247, + "grad_norm": 0.20491483807563782, + "learning_rate": 7.903681155679198e-05, + "loss": 1.779, + "step": 436 + }, + { + "epoch": 0.19482835488185465, + "grad_norm": 0.2175835222005844, + "learning_rate": 7.902265312219188e-05, + "loss": 1.7517, + "step": 437 + }, + { + "epoch": 0.19527418635755683, + "grad_norm": 0.2133513242006302, + "learning_rate": 7.900839267099233e-05, + "loss": 1.7672, + "step": 438 + }, + { + "epoch": 0.19572001783325904, + "grad_norm": 0.21768039464950562, + "learning_rate": 7.899403024047429e-05, + "loss": 1.7668, + "step": 439 + }, + { + "epoch": 0.19616584930896122, + "grad_norm": 0.19812998175621033, + "learning_rate": 7.897956586818536e-05, + "loss": 1.6268, + "step": 440 + }, + { + "epoch": 0.1966116807846634, + "grad_norm": 0.20877686142921448, + "learning_rate": 7.896499959193963e-05, + "loss": 1.7064, + "step": 441 + }, + { + "epoch": 0.19705751226036558, + "grad_norm": 0.2183704674243927, + "learning_rate": 7.895033144981759e-05, + "loss": 1.7475, + "step": 442 + }, + { + "epoch": 0.19750334373606776, + "grad_norm": 0.20506885647773743, + "learning_rate": 7.893556148016602e-05, + "loss": 1.7189, + "step": 443 + }, + { + "epoch": 0.19794917521176994, + "grad_norm": 0.20721159875392914, + "learning_rate": 7.892068972159797e-05, + "loss": 1.7609, + "step": 444 + }, + { + "epoch": 0.19839500668747215, + "grad_norm": 0.21451415121555328, + "learning_rate": 7.890571621299252e-05, + "loss": 1.8069, + "step": 445 + }, + { + "epoch": 0.19884083816317433, + "grad_norm": 0.2216353714466095, + "learning_rate": 7.889064099349483e-05, + "loss": 1.8198, + "step": 446 + }, + { + "epoch": 0.1992866696388765, + "grad_norm": 0.2052127569913864, + "learning_rate": 7.887546410251588e-05, + "loss": 1.7137, + "step": 447 + }, + { + "epoch": 0.1997325011145787, + "grad_norm": 0.202201709151268, + "learning_rate": 7.88601855797325e-05, + "loss": 1.7105, + "step": 448 + }, + { + "epoch": 0.20017833259028087, + "grad_norm": 0.23586085438728333, + "learning_rate": 7.884480546508724e-05, + "loss": 1.7372, + "step": 449 + }, + { + "epoch": 
0.20062416406598305, + "grad_norm": 0.21336017549037933, + "learning_rate": 7.882932379878816e-05, + "loss": 1.7355, + "step": 450 + }, + { + "epoch": 0.20106999554168525, + "grad_norm": 0.19796158373355865, + "learning_rate": 7.881374062130888e-05, + "loss": 1.5849, + "step": 451 + }, + { + "epoch": 0.20151582701738743, + "grad_norm": 0.21551991999149323, + "learning_rate": 7.879805597338836e-05, + "loss": 1.7133, + "step": 452 + }, + { + "epoch": 0.2019616584930896, + "grad_norm": 0.21652403473854065, + "learning_rate": 7.878226989603085e-05, + "loss": 1.712, + "step": 453 + }, + { + "epoch": 0.2024074899687918, + "grad_norm": 0.23366712033748627, + "learning_rate": 7.876638243050577e-05, + "loss": 1.8546, + "step": 454 + }, + { + "epoch": 0.20285332144449397, + "grad_norm": 0.2219308465719223, + "learning_rate": 7.875039361834758e-05, + "loss": 1.7547, + "step": 455 + }, + { + "epoch": 0.20329915292019615, + "grad_norm": 0.22077837586402893, + "learning_rate": 7.873430350135569e-05, + "loss": 1.6704, + "step": 456 + }, + { + "epoch": 0.20374498439589836, + "grad_norm": 0.21282720565795898, + "learning_rate": 7.871811212159439e-05, + "loss": 1.7612, + "step": 457 + }, + { + "epoch": 0.20419081587160054, + "grad_norm": 0.21781127154827118, + "learning_rate": 7.870181952139263e-05, + "loss": 1.7018, + "step": 458 + }, + { + "epoch": 0.20463664734730272, + "grad_norm": 0.2093805968761444, + "learning_rate": 7.868542574334404e-05, + "loss": 1.6885, + "step": 459 + }, + { + "epoch": 0.2050824788230049, + "grad_norm": 0.22918467223644257, + "learning_rate": 7.866893083030675e-05, + "loss": 1.7805, + "step": 460 + }, + { + "epoch": 0.20552831029870708, + "grad_norm": 0.21219949424266815, + "learning_rate": 7.865233482540328e-05, + "loss": 1.7359, + "step": 461 + }, + { + "epoch": 0.2059741417744093, + "grad_norm": 0.2140708565711975, + "learning_rate": 7.86356377720204e-05, + "loss": 1.7639, + "step": 462 + }, + { + "epoch": 0.20641997325011147, + "grad_norm": 0.23221629858016968, + "learning_rate": 7.861883971380908e-05, + "loss": 1.8052, + "step": 463 + }, + { + "epoch": 0.20686580472581365, + "grad_norm": 0.21740645170211792, + "learning_rate": 7.860194069468437e-05, + "loss": 1.824, + "step": 464 + }, + { + "epoch": 0.20731163620151583, + "grad_norm": 0.21377727389335632, + "learning_rate": 7.85849407588252e-05, + "loss": 1.7575, + "step": 465 + }, + { + "epoch": 0.207757467677218, + "grad_norm": 0.2288164347410202, + "learning_rate": 7.856783995067441e-05, + "loss": 1.7964, + "step": 466 + }, + { + "epoch": 0.20820329915292018, + "grad_norm": 0.21703492105007172, + "learning_rate": 7.855063831493846e-05, + "loss": 1.7686, + "step": 467 + }, + { + "epoch": 0.2086491306286224, + "grad_norm": 0.20621813833713531, + "learning_rate": 7.853333589658747e-05, + "loss": 1.6916, + "step": 468 + }, + { + "epoch": 0.20909496210432457, + "grad_norm": 0.20889562368392944, + "learning_rate": 7.851593274085499e-05, + "loss": 1.802, + "step": 469 + }, + { + "epoch": 0.20954079358002675, + "grad_norm": 0.20696090161800385, + "learning_rate": 7.849842889323796e-05, + "loss": 1.7314, + "step": 470 + }, + { + "epoch": 0.20998662505572893, + "grad_norm": 0.2312188297510147, + "learning_rate": 7.848082439949654e-05, + "loss": 1.8498, + "step": 471 + }, + { + "epoch": 0.2104324565314311, + "grad_norm": 0.20535381138324738, + "learning_rate": 7.8463119305654e-05, + "loss": 1.8197, + "step": 472 + }, + { + "epoch": 0.2108782880071333, + "grad_norm": 0.2188010960817337, + "learning_rate": 7.844531365799666e-05, + 
"loss": 1.7539, + "step": 473 + }, + { + "epoch": 0.2113241194828355, + "grad_norm": 0.20804068446159363, + "learning_rate": 7.842740750307362e-05, + "loss": 1.7942, + "step": 474 + }, + { + "epoch": 0.21176995095853768, + "grad_norm": 0.2164592146873474, + "learning_rate": 7.840940088769683e-05, + "loss": 1.7582, + "step": 475 + }, + { + "epoch": 0.21221578243423986, + "grad_norm": 0.2279512584209442, + "learning_rate": 7.839129385894082e-05, + "loss": 1.7594, + "step": 476 + }, + { + "epoch": 0.21266161390994204, + "grad_norm": 0.20543353259563446, + "learning_rate": 7.837308646414267e-05, + "loss": 1.7261, + "step": 477 + }, + { + "epoch": 0.21310744538564422, + "grad_norm": 0.2820877432823181, + "learning_rate": 7.835477875090179e-05, + "loss": 1.7853, + "step": 478 + }, + { + "epoch": 0.2135532768613464, + "grad_norm": 0.24093766510486603, + "learning_rate": 7.833637076707991e-05, + "loss": 1.8439, + "step": 479 + }, + { + "epoch": 0.2139991083370486, + "grad_norm": 0.217599555850029, + "learning_rate": 7.831786256080085e-05, + "loss": 1.7984, + "step": 480 + }, + { + "epoch": 0.21444493981275078, + "grad_norm": 0.23980188369750977, + "learning_rate": 7.829925418045048e-05, + "loss": 1.7762, + "step": 481 + }, + { + "epoch": 0.21489077128845296, + "grad_norm": 0.22318458557128906, + "learning_rate": 7.828054567467652e-05, + "loss": 1.7612, + "step": 482 + }, + { + "epoch": 0.21533660276415514, + "grad_norm": 0.21457462012767792, + "learning_rate": 7.826173709238846e-05, + "loss": 1.7034, + "step": 483 + }, + { + "epoch": 0.21578243423985732, + "grad_norm": 0.2079022228717804, + "learning_rate": 7.824282848275741e-05, + "loss": 1.8033, + "step": 484 + }, + { + "epoch": 0.21622826571555953, + "grad_norm": 0.20726555585861206, + "learning_rate": 7.822381989521605e-05, + "loss": 1.7805, + "step": 485 + }, + { + "epoch": 0.2166740971912617, + "grad_norm": 0.2167215645313263, + "learning_rate": 7.820471137945831e-05, + "loss": 1.8171, + "step": 486 + }, + { + "epoch": 0.2171199286669639, + "grad_norm": 0.21359805762767792, + "learning_rate": 7.818550298543944e-05, + "loss": 1.7444, + "step": 487 + }, + { + "epoch": 0.21756576014266607, + "grad_norm": 0.20531319081783295, + "learning_rate": 7.81661947633758e-05, + "loss": 1.7091, + "step": 488 + }, + { + "epoch": 0.21801159161836825, + "grad_norm": 0.20193392038345337, + "learning_rate": 7.814678676374471e-05, + "loss": 1.7053, + "step": 489 + }, + { + "epoch": 0.21845742309407043, + "grad_norm": 0.2035619169473648, + "learning_rate": 7.812727903728433e-05, + "loss": 1.696, + "step": 490 + }, + { + "epoch": 0.21890325456977264, + "grad_norm": 0.20180848240852356, + "learning_rate": 7.810767163499358e-05, + "loss": 1.7959, + "step": 491 + }, + { + "epoch": 0.21934908604547482, + "grad_norm": 0.21526241302490234, + "learning_rate": 7.80879646081319e-05, + "loss": 1.8276, + "step": 492 + }, + { + "epoch": 0.219794917521177, + "grad_norm": 0.20258860290050507, + "learning_rate": 7.806815800821922e-05, + "loss": 1.6339, + "step": 493 + }, + { + "epoch": 0.22024074899687918, + "grad_norm": 0.2058095633983612, + "learning_rate": 7.804825188703575e-05, + "loss": 1.7035, + "step": 494 + }, + { + "epoch": 0.22068658047258136, + "grad_norm": 0.1978922039270401, + "learning_rate": 7.802824629662192e-05, + "loss": 1.6683, + "step": 495 + }, + { + "epoch": 0.22113241194828354, + "grad_norm": 0.21317130327224731, + "learning_rate": 7.800814128927819e-05, + "loss": 1.8152, + "step": 496 + }, + { + "epoch": 0.22157824342398574, + "grad_norm": 
0.1988740861415863, + "learning_rate": 7.798793691756488e-05, + "loss": 1.65, + "step": 497 + }, + { + "epoch": 0.22202407489968792, + "grad_norm": 0.20139382779598236, + "learning_rate": 7.796763323430215e-05, + "loss": 1.6872, + "step": 498 + }, + { + "epoch": 0.2224699063753901, + "grad_norm": 0.20387861132621765, + "learning_rate": 7.794723029256971e-05, + "loss": 1.7308, + "step": 499 + }, + { + "epoch": 0.22291573785109228, + "grad_norm": 0.204574853181839, + "learning_rate": 7.792672814570683e-05, + "loss": 1.7583, + "step": 500 + }, + { + "epoch": 0.22336156932679446, + "grad_norm": 0.2186586558818817, + "learning_rate": 7.790612684731211e-05, + "loss": 1.8411, + "step": 501 + }, + { + "epoch": 0.22380740080249667, + "grad_norm": 0.22103360295295715, + "learning_rate": 7.788542645124333e-05, + "loss": 1.8295, + "step": 502 + }, + { + "epoch": 0.22425323227819885, + "grad_norm": 0.2011125087738037, + "learning_rate": 7.786462701161738e-05, + "loss": 1.7213, + "step": 503 + }, + { + "epoch": 0.22469906375390103, + "grad_norm": 0.21138082444667816, + "learning_rate": 7.784372858281006e-05, + "loss": 1.7867, + "step": 504 + }, + { + "epoch": 0.2251448952296032, + "grad_norm": 0.20400221645832062, + "learning_rate": 7.782273121945594e-05, + "loss": 1.7661, + "step": 505 + }, + { + "epoch": 0.2255907267053054, + "grad_norm": 0.19883286952972412, + "learning_rate": 7.780163497644829e-05, + "loss": 1.7831, + "step": 506 + }, + { + "epoch": 0.22603655818100757, + "grad_norm": 0.20815902948379517, + "learning_rate": 7.778043990893882e-05, + "loss": 1.7682, + "step": 507 + }, + { + "epoch": 0.22648238965670978, + "grad_norm": 0.20501793920993805, + "learning_rate": 7.775914607233761e-05, + "loss": 1.7769, + "step": 508 + }, + { + "epoch": 0.22692822113241196, + "grad_norm": 0.2141670286655426, + "learning_rate": 7.773775352231299e-05, + "loss": 1.7864, + "step": 509 + }, + { + "epoch": 0.22737405260811414, + "grad_norm": 0.1976698487997055, + "learning_rate": 7.771626231479133e-05, + "loss": 1.7345, + "step": 510 + }, + { + "epoch": 0.22781988408381632, + "grad_norm": 0.2203235626220703, + "learning_rate": 7.769467250595688e-05, + "loss": 1.7741, + "step": 511 + }, + { + "epoch": 0.2282657155595185, + "grad_norm": 0.20633700489997864, + "learning_rate": 7.767298415225172e-05, + "loss": 1.6671, + "step": 512 + }, + { + "epoch": 0.22871154703522067, + "grad_norm": 0.21930013597011566, + "learning_rate": 7.765119731037554e-05, + "loss": 1.6226, + "step": 513 + }, + { + "epoch": 0.22915737851092288, + "grad_norm": 0.2221810221672058, + "learning_rate": 7.762931203728547e-05, + "loss": 1.7949, + "step": 514 + }, + { + "epoch": 0.22960320998662506, + "grad_norm": 0.2116052806377411, + "learning_rate": 7.760732839019604e-05, + "loss": 1.7742, + "step": 515 + }, + { + "epoch": 0.23004904146232724, + "grad_norm": 0.22342365980148315, + "learning_rate": 7.758524642657887e-05, + "loss": 1.8095, + "step": 516 + }, + { + "epoch": 0.23049487293802942, + "grad_norm": 0.19809666275978088, + "learning_rate": 7.756306620416272e-05, + "loss": 1.6623, + "step": 517 + }, + { + "epoch": 0.2309407044137316, + "grad_norm": 0.2312929928302765, + "learning_rate": 7.75407877809331e-05, + "loss": 1.7861, + "step": 518 + }, + { + "epoch": 0.23138653588943378, + "grad_norm": 0.21036399900913239, + "learning_rate": 7.751841121513235e-05, + "loss": 1.7913, + "step": 519 + }, + { + "epoch": 0.231832367365136, + "grad_norm": 0.21546313166618347, + "learning_rate": 7.749593656525931e-05, + "loss": 1.7115, + "step": 520 + }, + 
{ + "epoch": 0.23227819884083817, + "grad_norm": 0.21037909388542175, + "learning_rate": 7.747336389006928e-05, + "loss": 1.7683, + "step": 521 + }, + { + "epoch": 0.23272403031654035, + "grad_norm": 0.2078377604484558, + "learning_rate": 7.745069324857383e-05, + "loss": 1.7656, + "step": 522 + }, + { + "epoch": 0.23316986179224253, + "grad_norm": 0.21215571463108063, + "learning_rate": 7.742792470004062e-05, + "loss": 1.7216, + "step": 523 + }, + { + "epoch": 0.2336156932679447, + "grad_norm": 0.19274559617042542, + "learning_rate": 7.740505830399329e-05, + "loss": 1.6717, + "step": 524 + }, + { + "epoch": 0.23406152474364691, + "grad_norm": 0.2016606479883194, + "learning_rate": 7.738209412021124e-05, + "loss": 1.7287, + "step": 525 + }, + { + "epoch": 0.2345073562193491, + "grad_norm": 0.20804037153720856, + "learning_rate": 7.735903220872958e-05, + "loss": 1.7927, + "step": 526 + }, + { + "epoch": 0.23495318769505127, + "grad_norm": 0.22181366384029388, + "learning_rate": 7.733587262983888e-05, + "loss": 1.835, + "step": 527 + }, + { + "epoch": 0.23539901917075345, + "grad_norm": 0.20415058732032776, + "learning_rate": 7.7312615444085e-05, + "loss": 1.7226, + "step": 528 + }, + { + "epoch": 0.23584485064645563, + "grad_norm": 0.21193745732307434, + "learning_rate": 7.728926071226902e-05, + "loss": 1.8005, + "step": 529 + }, + { + "epoch": 0.2362906821221578, + "grad_norm": 0.21242454648017883, + "learning_rate": 7.726580849544704e-05, + "loss": 1.7163, + "step": 530 + }, + { + "epoch": 0.23673651359786002, + "grad_norm": 0.20869044959545135, + "learning_rate": 7.724225885492998e-05, + "loss": 1.7958, + "step": 531 + }, + { + "epoch": 0.2371823450735622, + "grad_norm": 0.21901239454746246, + "learning_rate": 7.721861185228347e-05, + "loss": 1.7394, + "step": 532 + }, + { + "epoch": 0.23762817654926438, + "grad_norm": 0.20783841609954834, + "learning_rate": 7.719486754932768e-05, + "loss": 1.7592, + "step": 533 + }, + { + "epoch": 0.23807400802496656, + "grad_norm": 0.2176273763179779, + "learning_rate": 7.717102600813716e-05, + "loss": 1.7896, + "step": 534 + }, + { + "epoch": 0.23851983950066874, + "grad_norm": 0.2128293663263321, + "learning_rate": 7.714708729104062e-05, + "loss": 1.7401, + "step": 535 + }, + { + "epoch": 0.23896567097637092, + "grad_norm": 0.2054302990436554, + "learning_rate": 7.712305146062087e-05, + "loss": 1.7348, + "step": 536 + }, + { + "epoch": 0.23941150245207313, + "grad_norm": 0.2005021572113037, + "learning_rate": 7.709891857971458e-05, + "loss": 1.76, + "step": 537 + }, + { + "epoch": 0.2398573339277753, + "grad_norm": 0.2186511605978012, + "learning_rate": 7.707468871141215e-05, + "loss": 1.8412, + "step": 538 + }, + { + "epoch": 0.2403031654034775, + "grad_norm": 0.204304039478302, + "learning_rate": 7.705036191905752e-05, + "loss": 1.703, + "step": 539 + }, + { + "epoch": 0.24074899687917967, + "grad_norm": 0.2037625014781952, + "learning_rate": 7.702593826624801e-05, + "loss": 1.7164, + "step": 540 + }, + { + "epoch": 0.24119482835488185, + "grad_norm": 0.2177598923444748, + "learning_rate": 7.700141781683418e-05, + "loss": 1.738, + "step": 541 + }, + { + "epoch": 0.24164065983058403, + "grad_norm": 0.21556971967220306, + "learning_rate": 7.697680063491964e-05, + "loss": 1.6842, + "step": 542 + }, + { + "epoch": 0.24208649130628623, + "grad_norm": 0.2162875533103943, + "learning_rate": 7.695208678486088e-05, + "loss": 1.7615, + "step": 543 + }, + { + "epoch": 0.2425323227819884, + "grad_norm": 0.21224163472652435, + "learning_rate": 
7.692727633126714e-05, + "loss": 1.7354, + "step": 544 + }, + { + "epoch": 0.2429781542576906, + "grad_norm": 0.2110309898853302, + "learning_rate": 7.690236933900014e-05, + "loss": 1.6944, + "step": 545 + }, + { + "epoch": 0.24342398573339277, + "grad_norm": 0.20104189217090607, + "learning_rate": 7.687736587317407e-05, + "loss": 1.7314, + "step": 546 + }, + { + "epoch": 0.24386981720909495, + "grad_norm": 0.21651782095432281, + "learning_rate": 7.685226599915524e-05, + "loss": 1.7696, + "step": 547 + }, + { + "epoch": 0.24431564868479716, + "grad_norm": 0.206302210688591, + "learning_rate": 7.682706978256208e-05, + "loss": 1.7771, + "step": 548 + }, + { + "epoch": 0.24476148016049934, + "grad_norm": 0.20536518096923828, + "learning_rate": 7.680177728926483e-05, + "loss": 1.7412, + "step": 549 + }, + { + "epoch": 0.24520731163620152, + "grad_norm": 0.2088993638753891, + "learning_rate": 7.677638858538544e-05, + "loss": 1.7634, + "step": 550 + }, + { + "epoch": 0.2456531431119037, + "grad_norm": 0.20735305547714233, + "learning_rate": 7.675090373729741e-05, + "loss": 1.7782, + "step": 551 + }, + { + "epoch": 0.24609897458760588, + "grad_norm": 0.2111474722623825, + "learning_rate": 7.672532281162556e-05, + "loss": 1.7773, + "step": 552 + }, + { + "epoch": 0.24654480606330806, + "grad_norm": 0.19856305420398712, + "learning_rate": 7.669964587524588e-05, + "loss": 1.6685, + "step": 553 + }, + { + "epoch": 0.24699063753901027, + "grad_norm": 0.20613139867782593, + "learning_rate": 7.66738729952854e-05, + "loss": 1.6916, + "step": 554 + }, + { + "epoch": 0.24743646901471245, + "grad_norm": 0.20895972847938538, + "learning_rate": 7.664800423912191e-05, + "loss": 1.7875, + "step": 555 + }, + { + "epoch": 0.24788230049041463, + "grad_norm": 0.20388461649417877, + "learning_rate": 7.662203967438391e-05, + "loss": 1.6645, + "step": 556 + }, + { + "epoch": 0.2483281319661168, + "grad_norm": 0.20363497734069824, + "learning_rate": 7.659597936895034e-05, + "loss": 1.755, + "step": 557 + }, + { + "epoch": 0.24877396344181898, + "grad_norm": 0.21989643573760986, + "learning_rate": 7.656982339095045e-05, + "loss": 1.8409, + "step": 558 + }, + { + "epoch": 0.24921979491752116, + "grad_norm": 0.19894006848335266, + "learning_rate": 7.654357180876358e-05, + "loss": 1.7576, + "step": 559 + }, + { + "epoch": 0.24966562639322337, + "grad_norm": 0.21573615074157715, + "learning_rate": 7.651722469101905e-05, + "loss": 1.7228, + "step": 560 + }, + { + "epoch": 0.2501114578689255, + "grad_norm": 0.24178695678710938, + "learning_rate": 7.649078210659587e-05, + "loss": 1.7428, + "step": 561 + }, + { + "epoch": 0.25055728934462773, + "grad_norm": 0.21728956699371338, + "learning_rate": 7.646424412462273e-05, + "loss": 1.8551, + "step": 562 + }, + { + "epoch": 0.25100312082032994, + "grad_norm": 0.21060433983802795, + "learning_rate": 7.643761081447758e-05, + "loss": 1.7606, + "step": 563 + }, + { + "epoch": 0.2514489522960321, + "grad_norm": 0.22947797179222107, + "learning_rate": 7.641088224578773e-05, + "loss": 1.7306, + "step": 564 + }, + { + "epoch": 0.2518947837717343, + "grad_norm": 0.21212172508239746, + "learning_rate": 7.638405848842945e-05, + "loss": 1.7445, + "step": 565 + }, + { + "epoch": 0.25234061524743645, + "grad_norm": 0.21673139929771423, + "learning_rate": 7.635713961252782e-05, + "loss": 1.7556, + "step": 566 + }, + { + "epoch": 0.25278644672313866, + "grad_norm": 0.22516381740570068, + "learning_rate": 7.633012568845668e-05, + "loss": 1.7493, + "step": 567 + }, + { + "epoch": 
0.25323227819884087, + "grad_norm": 0.23668955266475677, + "learning_rate": 7.63030167868383e-05, + "loss": 1.7352, + "step": 568 + }, + { + "epoch": 0.253678109674543, + "grad_norm": 0.22791285812854767, + "learning_rate": 7.627581297854322e-05, + "loss": 1.7048, + "step": 569 + }, + { + "epoch": 0.2541239411502452, + "grad_norm": 0.24229630827903748, + "learning_rate": 7.624851433469017e-05, + "loss": 1.7953, + "step": 570 + }, + { + "epoch": 0.2545697726259474, + "grad_norm": 0.20149646699428558, + "learning_rate": 7.622112092664577e-05, + "loss": 1.7353, + "step": 571 + }, + { + "epoch": 0.2550156041016496, + "grad_norm": 0.21539618074893951, + "learning_rate": 7.619363282602436e-05, + "loss": 1.6643, + "step": 572 + }, + { + "epoch": 0.25546143557735174, + "grad_norm": 0.23108653724193573, + "learning_rate": 7.616605010468787e-05, + "loss": 1.77, + "step": 573 + }, + { + "epoch": 0.25590726705305394, + "grad_norm": 0.21947409212589264, + "learning_rate": 7.613837283474559e-05, + "loss": 1.7714, + "step": 574 + }, + { + "epoch": 0.25635309852875615, + "grad_norm": 0.2154000699520111, + "learning_rate": 7.611060108855393e-05, + "loss": 1.7469, + "step": 575 + }, + { + "epoch": 0.2567989300044583, + "grad_norm": 0.23046647012233734, + "learning_rate": 7.608273493871639e-05, + "loss": 1.7607, + "step": 576 + }, + { + "epoch": 0.2572447614801605, + "grad_norm": 0.23710086941719055, + "learning_rate": 7.605477445808318e-05, + "loss": 1.7796, + "step": 577 + }, + { + "epoch": 0.25769059295586266, + "grad_norm": 0.20885971188545227, + "learning_rate": 7.602671971975118e-05, + "loss": 1.816, + "step": 578 + }, + { + "epoch": 0.25813642443156487, + "grad_norm": 0.22010734677314758, + "learning_rate": 7.599857079706364e-05, + "loss": 1.7466, + "step": 579 + }, + { + "epoch": 0.2585822559072671, + "grad_norm": 0.21547986567020416, + "learning_rate": 7.597032776361007e-05, + "loss": 1.7816, + "step": 580 + }, + { + "epoch": 0.25902808738296923, + "grad_norm": 0.19855529069900513, + "learning_rate": 7.594199069322598e-05, + "loss": 1.7245, + "step": 581 + }, + { + "epoch": 0.25947391885867144, + "grad_norm": 0.19900168478488922, + "learning_rate": 7.591355965999276e-05, + "loss": 1.7174, + "step": 582 + }, + { + "epoch": 0.2599197503343736, + "grad_norm": 0.2193600833415985, + "learning_rate": 7.588503473823742e-05, + "loss": 1.7096, + "step": 583 + }, + { + "epoch": 0.2603655818100758, + "grad_norm": 0.21309758722782135, + "learning_rate": 7.585641600253242e-05, + "loss": 1.7349, + "step": 584 + }, + { + "epoch": 0.26081141328577795, + "grad_norm": 0.20293527841567993, + "learning_rate": 7.582770352769548e-05, + "loss": 1.7084, + "step": 585 + }, + { + "epoch": 0.26125724476148016, + "grad_norm": 0.20566365122795105, + "learning_rate": 7.57988973887894e-05, + "loss": 1.6904, + "step": 586 + }, + { + "epoch": 0.26170307623718236, + "grad_norm": 0.21862030029296875, + "learning_rate": 7.576999766112183e-05, + "loss": 1.7699, + "step": 587 + }, + { + "epoch": 0.2621489077128845, + "grad_norm": 0.22617413103580475, + "learning_rate": 7.574100442024507e-05, + "loss": 1.9066, + "step": 588 + }, + { + "epoch": 0.2625947391885867, + "grad_norm": 0.23065897822380066, + "learning_rate": 7.571191774195592e-05, + "loss": 1.7946, + "step": 589 + }, + { + "epoch": 0.2630405706642889, + "grad_norm": 0.21127870678901672, + "learning_rate": 7.568273770229546e-05, + "loss": 1.7289, + "step": 590 + }, + { + "epoch": 0.2634864021399911, + "grad_norm": 0.2256181687116623, + "learning_rate": 7.565346437754881e-05, + 
"loss": 1.7918, + "step": 591 + }, + { + "epoch": 0.2639322336156933, + "grad_norm": 0.2192109078168869, + "learning_rate": 7.562409784424499e-05, + "loss": 1.7303, + "step": 592 + }, + { + "epoch": 0.26437806509139544, + "grad_norm": 0.20271852612495422, + "learning_rate": 7.559463817915666e-05, + "loss": 1.7136, + "step": 593 + }, + { + "epoch": 0.26482389656709765, + "grad_norm": 0.2233620136976242, + "learning_rate": 7.556508545930001e-05, + "loss": 1.6845, + "step": 594 + }, + { + "epoch": 0.2652697280427998, + "grad_norm": 0.24921123683452606, + "learning_rate": 7.553543976193446e-05, + "loss": 1.7055, + "step": 595 + }, + { + "epoch": 0.265715559518502, + "grad_norm": 0.21758922934532166, + "learning_rate": 7.55057011645625e-05, + "loss": 1.6639, + "step": 596 + }, + { + "epoch": 0.2661613909942042, + "grad_norm": 0.22392435371875763, + "learning_rate": 7.547586974492951e-05, + "loss": 1.7267, + "step": 597 + }, + { + "epoch": 0.26660722246990637, + "grad_norm": 0.2134278565645218, + "learning_rate": 7.544594558102352e-05, + "loss": 1.7213, + "step": 598 + }, + { + "epoch": 0.2670530539456086, + "grad_norm": 0.21148909628391266, + "learning_rate": 7.541592875107504e-05, + "loss": 1.7874, + "step": 599 + }, + { + "epoch": 0.26749888542131073, + "grad_norm": 0.22129899263381958, + "learning_rate": 7.538581933355681e-05, + "loss": 1.714, + "step": 600 + }, + { + "epoch": 0.26794471689701294, + "grad_norm": 0.23374201357364655, + "learning_rate": 7.535561740718362e-05, + "loss": 1.7103, + "step": 601 + }, + { + "epoch": 0.2683905483727151, + "grad_norm": 0.21001872420310974, + "learning_rate": 7.532532305091211e-05, + "loss": 1.6265, + "step": 602 + }, + { + "epoch": 0.2688363798484173, + "grad_norm": 0.23460710048675537, + "learning_rate": 7.52949363439406e-05, + "loss": 1.7838, + "step": 603 + }, + { + "epoch": 0.2692822113241195, + "grad_norm": 0.23146916925907135, + "learning_rate": 7.526445736570879e-05, + "loss": 1.764, + "step": 604 + }, + { + "epoch": 0.26972804279982165, + "grad_norm": 0.19246350228786469, + "learning_rate": 7.52338861958976e-05, + "loss": 1.6467, + "step": 605 + }, + { + "epoch": 0.27017387427552386, + "grad_norm": 0.2351468801498413, + "learning_rate": 7.520322291442903e-05, + "loss": 1.6567, + "step": 606 + }, + { + "epoch": 0.270619705751226, + "grad_norm": 0.2138238102197647, + "learning_rate": 7.51724676014658e-05, + "loss": 1.6515, + "step": 607 + }, + { + "epoch": 0.2710655372269282, + "grad_norm": 0.23105895519256592, + "learning_rate": 7.51416203374113e-05, + "loss": 1.7563, + "step": 608 + }, + { + "epoch": 0.27151136870263043, + "grad_norm": 0.21428363025188446, + "learning_rate": 7.511068120290926e-05, + "loss": 1.7891, + "step": 609 + }, + { + "epoch": 0.2719572001783326, + "grad_norm": 0.21053239703178406, + "learning_rate": 7.507965027884361e-05, + "loss": 1.7177, + "step": 610 + }, + { + "epoch": 0.2724030316540348, + "grad_norm": 0.231001615524292, + "learning_rate": 7.504852764633823e-05, + "loss": 1.7051, + "step": 611 + }, + { + "epoch": 0.27284886312973694, + "grad_norm": 0.21583721041679382, + "learning_rate": 7.501731338675677e-05, + "loss": 1.7414, + "step": 612 + }, + { + "epoch": 0.27329469460543915, + "grad_norm": 0.21509505808353424, + "learning_rate": 7.498600758170241e-05, + "loss": 1.669, + "step": 613 + }, + { + "epoch": 0.27374052608114136, + "grad_norm": 0.22119706869125366, + "learning_rate": 7.495461031301765e-05, + "loss": 1.6687, + "step": 614 + }, + { + "epoch": 0.2741863575568435, + "grad_norm": 0.22133754193782806, + 
"learning_rate": 7.492312166278414e-05, + "loss": 1.8043, + "step": 615 + }, + { + "epoch": 0.2746321890325457, + "grad_norm": 0.2934802770614624, + "learning_rate": 7.489154171332236e-05, + "loss": 1.7561, + "step": 616 + }, + { + "epoch": 0.27507802050824787, + "grad_norm": 0.201655313372612, + "learning_rate": 7.485987054719153e-05, + "loss": 1.6652, + "step": 617 + }, + { + "epoch": 0.2755238519839501, + "grad_norm": 0.2276788353919983, + "learning_rate": 7.482810824718931e-05, + "loss": 1.7559, + "step": 618 + }, + { + "epoch": 0.2759696834596522, + "grad_norm": 0.22293934226036072, + "learning_rate": 7.479625489635162e-05, + "loss": 1.6903, + "step": 619 + }, + { + "epoch": 0.27641551493535443, + "grad_norm": 0.20375767350196838, + "learning_rate": 7.476431057795241e-05, + "loss": 1.7936, + "step": 620 + }, + { + "epoch": 0.27686134641105664, + "grad_norm": 0.22783713042736053, + "learning_rate": 7.473227537550346e-05, + "loss": 1.7063, + "step": 621 + }, + { + "epoch": 0.2773071778867588, + "grad_norm": 0.2079494595527649, + "learning_rate": 7.470014937275411e-05, + "loss": 1.7074, + "step": 622 + }, + { + "epoch": 0.277753009362461, + "grad_norm": 0.2122948169708252, + "learning_rate": 7.466793265369114e-05, + "loss": 1.7078, + "step": 623 + }, + { + "epoch": 0.27819884083816315, + "grad_norm": 0.2332155853509903, + "learning_rate": 7.463562530253842e-05, + "loss": 1.8, + "step": 624 + }, + { + "epoch": 0.27864467231386536, + "grad_norm": 0.2049056589603424, + "learning_rate": 7.46032274037568e-05, + "loss": 1.6335, + "step": 625 + }, + { + "epoch": 0.27909050378956757, + "grad_norm": 0.22888043522834778, + "learning_rate": 7.457073904204384e-05, + "loss": 1.6614, + "step": 626 + }, + { + "epoch": 0.2795363352652697, + "grad_norm": 0.2044975906610489, + "learning_rate": 7.45381603023336e-05, + "loss": 1.6818, + "step": 627 + }, + { + "epoch": 0.2799821667409719, + "grad_norm": 0.22751621901988983, + "learning_rate": 7.450549126979643e-05, + "loss": 1.8181, + "step": 628 + }, + { + "epoch": 0.2804279982166741, + "grad_norm": 0.20126068592071533, + "learning_rate": 7.447273202983866e-05, + "loss": 1.7101, + "step": 629 + }, + { + "epoch": 0.2808738296923763, + "grad_norm": 0.2142620086669922, + "learning_rate": 7.443988266810253e-05, + "loss": 1.7224, + "step": 630 + }, + { + "epoch": 0.28131966116807844, + "grad_norm": 0.20431213080883026, + "learning_rate": 7.440694327046587e-05, + "loss": 1.7421, + "step": 631 + }, + { + "epoch": 0.28176549264378065, + "grad_norm": 0.21456865966320038, + "learning_rate": 7.437391392304183e-05, + "loss": 1.8121, + "step": 632 + }, + { + "epoch": 0.28221132411948285, + "grad_norm": 0.21535775065422058, + "learning_rate": 7.43407947121788e-05, + "loss": 1.7968, + "step": 633 + }, + { + "epoch": 0.282657155595185, + "grad_norm": 0.20763567090034485, + "learning_rate": 7.430758572446007e-05, + "loss": 1.776, + "step": 634 + }, + { + "epoch": 0.2831029870708872, + "grad_norm": 0.2219155877828598, + "learning_rate": 7.427428704670357e-05, + "loss": 1.7112, + "step": 635 + }, + { + "epoch": 0.28354881854658937, + "grad_norm": 0.20080716907978058, + "learning_rate": 7.424089876596181e-05, + "loss": 1.7558, + "step": 636 + }, + { + "epoch": 0.2839946500222916, + "grad_norm": 0.2080196887254715, + "learning_rate": 7.420742096952147e-05, + "loss": 1.7308, + "step": 637 + }, + { + "epoch": 0.2844404814979938, + "grad_norm": 0.2138231247663498, + "learning_rate": 7.417385374490327e-05, + "loss": 1.7804, + "step": 638 + }, + { + "epoch": 0.28488631297369593, + 
"grad_norm": 0.20495419204235077, + "learning_rate": 7.414019717986174e-05, + "loss": 1.7727, + "step": 639 + }, + { + "epoch": 0.28533214444939814, + "grad_norm": 0.21069945394992828, + "learning_rate": 7.410645136238495e-05, + "loss": 1.7712, + "step": 640 + }, + { + "epoch": 0.2857779759251003, + "grad_norm": 0.19948187470436096, + "learning_rate": 7.407261638069433e-05, + "loss": 1.6869, + "step": 641 + }, + { + "epoch": 0.2862238074008025, + "grad_norm": 0.20422448217868805, + "learning_rate": 7.403869232324439e-05, + "loss": 1.7594, + "step": 642 + }, + { + "epoch": 0.2866696388765047, + "grad_norm": 0.1993008702993393, + "learning_rate": 7.40046792787225e-05, + "loss": 1.6994, + "step": 643 + }, + { + "epoch": 0.28711547035220686, + "grad_norm": 0.206438809633255, + "learning_rate": 7.397057733604868e-05, + "loss": 1.789, + "step": 644 + }, + { + "epoch": 0.28756130182790907, + "grad_norm": 0.22038403153419495, + "learning_rate": 7.393638658437537e-05, + "loss": 1.6763, + "step": 645 + }, + { + "epoch": 0.2880071333036112, + "grad_norm": 0.2054975926876068, + "learning_rate": 7.390210711308716e-05, + "loss": 1.7691, + "step": 646 + }, + { + "epoch": 0.2884529647793134, + "grad_norm": 0.20188304781913757, + "learning_rate": 7.38677390118006e-05, + "loss": 1.647, + "step": 647 + }, + { + "epoch": 0.2888987962550156, + "grad_norm": 0.20065569877624512, + "learning_rate": 7.38332823703639e-05, + "loss": 1.7561, + "step": 648 + }, + { + "epoch": 0.2893446277307178, + "grad_norm": 0.2073405534029007, + "learning_rate": 7.379873727885681e-05, + "loss": 1.7425, + "step": 649 + }, + { + "epoch": 0.28979045920642, + "grad_norm": 0.21183277666568756, + "learning_rate": 7.376410382759026e-05, + "loss": 1.7622, + "step": 650 + }, + { + "epoch": 0.29023629068212214, + "grad_norm": 0.21674148738384247, + "learning_rate": 7.372938210710622e-05, + "loss": 1.8301, + "step": 651 + }, + { + "epoch": 0.29068212215782435, + "grad_norm": 0.2239564210176468, + "learning_rate": 7.369457220817736e-05, + "loss": 1.7061, + "step": 652 + }, + { + "epoch": 0.2911279536335265, + "grad_norm": 0.2046596258878708, + "learning_rate": 7.365967422180692e-05, + "loss": 1.7185, + "step": 653 + }, + { + "epoch": 0.2915737851092287, + "grad_norm": 0.2077386975288391, + "learning_rate": 7.362468823922844e-05, + "loss": 1.6894, + "step": 654 + }, + { + "epoch": 0.2920196165849309, + "grad_norm": 0.22262682020664215, + "learning_rate": 7.358961435190546e-05, + "loss": 1.743, + "step": 655 + }, + { + "epoch": 0.29246544806063307, + "grad_norm": 0.22507880628108978, + "learning_rate": 7.355445265153136e-05, + "loss": 1.7843, + "step": 656 + }, + { + "epoch": 0.2929112795363353, + "grad_norm": 0.2175893634557724, + "learning_rate": 7.35192032300291e-05, + "loss": 1.7287, + "step": 657 + }, + { + "epoch": 0.29335711101203743, + "grad_norm": 0.2029731124639511, + "learning_rate": 7.348386617955094e-05, + "loss": 1.6842, + "step": 658 + }, + { + "epoch": 0.29380294248773964, + "grad_norm": 0.23122525215148926, + "learning_rate": 7.344844159247823e-05, + "loss": 1.8079, + "step": 659 + }, + { + "epoch": 0.29424877396344185, + "grad_norm": 0.2044292837381363, + "learning_rate": 7.341292956142117e-05, + "loss": 1.8154, + "step": 660 + }, + { + "epoch": 0.294694605439144, + "grad_norm": 0.20966410636901855, + "learning_rate": 7.337733017921859e-05, + "loss": 1.6904, + "step": 661 + }, + { + "epoch": 0.2951404369148462, + "grad_norm": 0.20876199007034302, + "learning_rate": 7.334164353893765e-05, + "loss": 1.7961, + "step": 662 + }, + { 
+ "epoch": 0.29558626839054836, + "grad_norm": 0.2060309499502182, + "learning_rate": 7.330586973387364e-05, + "loss": 1.6499, + "step": 663 + }, + { + "epoch": 0.29603209986625056, + "grad_norm": 0.21161815524101257, + "learning_rate": 7.327000885754973e-05, + "loss": 1.7694, + "step": 664 + }, + { + "epoch": 0.2964779313419527, + "grad_norm": 0.22235240042209625, + "learning_rate": 7.32340610037167e-05, + "loss": 1.8006, + "step": 665 + }, + { + "epoch": 0.2969237628176549, + "grad_norm": 0.2124897986650467, + "learning_rate": 7.319802626635271e-05, + "loss": 1.8204, + "step": 666 + }, + { + "epoch": 0.29736959429335713, + "grad_norm": 0.24330593645572662, + "learning_rate": 7.316190473966312e-05, + "loss": 1.7003, + "step": 667 + }, + { + "epoch": 0.2978154257690593, + "grad_norm": 0.20732414722442627, + "learning_rate": 7.312569651808011e-05, + "loss": 1.5801, + "step": 668 + }, + { + "epoch": 0.2982612572447615, + "grad_norm": 0.22267280519008636, + "learning_rate": 7.308940169626255e-05, + "loss": 1.7417, + "step": 669 + }, + { + "epoch": 0.29870708872046364, + "grad_norm": 0.20995312929153442, + "learning_rate": 7.305302036909567e-05, + "loss": 1.7385, + "step": 670 + }, + { + "epoch": 0.29915292019616585, + "grad_norm": 0.21934688091278076, + "learning_rate": 7.30165526316909e-05, + "loss": 1.7042, + "step": 671 + }, + { + "epoch": 0.29959875167186806, + "grad_norm": 0.22048768401145935, + "learning_rate": 7.297999857938554e-05, + "loss": 1.7672, + "step": 672 + }, + { + "epoch": 0.3000445831475702, + "grad_norm": 0.20573429763317108, + "learning_rate": 7.294335830774252e-05, + "loss": 1.7556, + "step": 673 + }, + { + "epoch": 0.3004904146232724, + "grad_norm": 0.21020594239234924, + "learning_rate": 7.290663191255022e-05, + "loss": 1.727, + "step": 674 + }, + { + "epoch": 0.30093624609897457, + "grad_norm": 0.20675401389598846, + "learning_rate": 7.286981948982215e-05, + "loss": 1.6531, + "step": 675 + }, + { + "epoch": 0.3013820775746768, + "grad_norm": 0.20087653398513794, + "learning_rate": 7.283292113579672e-05, + "loss": 1.7088, + "step": 676 + }, + { + "epoch": 0.301827909050379, + "grad_norm": 0.21411505341529846, + "learning_rate": 7.279593694693698e-05, + "loss": 1.7798, + "step": 677 + }, + { + "epoch": 0.30227374052608114, + "grad_norm": 0.21519498527050018, + "learning_rate": 7.275886701993042e-05, + "loss": 1.722, + "step": 678 + }, + { + "epoch": 0.30271957200178334, + "grad_norm": 0.21711432933807373, + "learning_rate": 7.272171145168863e-05, + "loss": 1.8175, + "step": 679 + }, + { + "epoch": 0.3031654034774855, + "grad_norm": 0.20538119971752167, + "learning_rate": 7.268447033934708e-05, + "loss": 1.7196, + "step": 680 + }, + { + "epoch": 0.3036112349531877, + "grad_norm": 0.22172988951206207, + "learning_rate": 7.264714378026494e-05, + "loss": 1.7341, + "step": 681 + }, + { + "epoch": 0.30405706642888986, + "grad_norm": 0.20911812782287598, + "learning_rate": 7.26097318720247e-05, + "loss": 1.7628, + "step": 682 + }, + { + "epoch": 0.30450289790459206, + "grad_norm": 0.20472237467765808, + "learning_rate": 7.257223471243201e-05, + "loss": 1.8058, + "step": 683 + }, + { + "epoch": 0.30494872938029427, + "grad_norm": 0.2150549590587616, + "learning_rate": 7.253465239951539e-05, + "loss": 1.6952, + "step": 684 + }, + { + "epoch": 0.3053945608559964, + "grad_norm": 0.20892566442489624, + "learning_rate": 7.249698503152596e-05, + "loss": 1.757, + "step": 685 + }, + { + "epoch": 0.30584039233169863, + "grad_norm": 0.20996753871440887, + "learning_rate": 
7.245923270693721e-05, + "loss": 1.7039, + "step": 686 + }, + { + "epoch": 0.3062862238074008, + "grad_norm": 0.20857934653759003, + "learning_rate": 7.242139552444473e-05, + "loss": 1.7152, + "step": 687 + }, + { + "epoch": 0.306732055283103, + "grad_norm": 0.20755071938037872, + "learning_rate": 7.238347358296596e-05, + "loss": 1.7138, + "step": 688 + }, + { + "epoch": 0.3071778867588052, + "grad_norm": 0.2029207944869995, + "learning_rate": 7.234546698163992e-05, + "loss": 1.7338, + "step": 689 + }, + { + "epoch": 0.30762371823450735, + "grad_norm": 0.20173445343971252, + "learning_rate": 7.230737581982694e-05, + "loss": 1.7759, + "step": 690 + }, + { + "epoch": 0.30806954971020956, + "grad_norm": 0.2252359688282013, + "learning_rate": 7.226920019710845e-05, + "loss": 1.7886, + "step": 691 + }, + { + "epoch": 0.3085153811859117, + "grad_norm": 0.2031915783882141, + "learning_rate": 7.223094021328664e-05, + "loss": 1.7351, + "step": 692 + }, + { + "epoch": 0.3089612126616139, + "grad_norm": 0.20862999558448792, + "learning_rate": 7.219259596838429e-05, + "loss": 1.6763, + "step": 693 + }, + { + "epoch": 0.30940704413731607, + "grad_norm": 0.21994739770889282, + "learning_rate": 7.215416756264442e-05, + "loss": 1.8017, + "step": 694 + }, + { + "epoch": 0.3098528756130183, + "grad_norm": 0.19967246055603027, + "learning_rate": 7.211565509653012e-05, + "loss": 1.696, + "step": 695 + }, + { + "epoch": 0.3102987070887205, + "grad_norm": 0.2177416831254959, + "learning_rate": 7.207705867072421e-05, + "loss": 1.7102, + "step": 696 + }, + { + "epoch": 0.31074453856442263, + "grad_norm": 0.22154054045677185, + "learning_rate": 7.203837838612897e-05, + "loss": 1.6522, + "step": 697 + }, + { + "epoch": 0.31119037004012484, + "grad_norm": 0.21412450075149536, + "learning_rate": 7.1999614343866e-05, + "loss": 1.8394, + "step": 698 + }, + { + "epoch": 0.311636201515827, + "grad_norm": 0.24822783470153809, + "learning_rate": 7.196076664527577e-05, + "loss": 1.7955, + "step": 699 + }, + { + "epoch": 0.3120820329915292, + "grad_norm": 0.21336504817008972, + "learning_rate": 7.192183539191753e-05, + "loss": 1.7144, + "step": 700 + }, + { + "epoch": 0.3125278644672314, + "grad_norm": 0.2380637526512146, + "learning_rate": 7.188282068556891e-05, + "loss": 1.6807, + "step": 701 + }, + { + "epoch": 0.31297369594293356, + "grad_norm": 0.19818735122680664, + "learning_rate": 7.184372262822575e-05, + "loss": 1.7561, + "step": 702 + }, + { + "epoch": 0.31341952741863577, + "grad_norm": 0.23580355942249298, + "learning_rate": 7.180454132210179e-05, + "loss": 1.7519, + "step": 703 + }, + { + "epoch": 0.3138653588943379, + "grad_norm": 0.2163688987493515, + "learning_rate": 7.176527686962835e-05, + "loss": 1.7003, + "step": 704 + }, + { + "epoch": 0.3143111903700401, + "grad_norm": 0.2241927832365036, + "learning_rate": 7.172592937345421e-05, + "loss": 1.6941, + "step": 705 + }, + { + "epoch": 0.31475702184574234, + "grad_norm": 0.22052347660064697, + "learning_rate": 7.168649893644517e-05, + "loss": 1.7133, + "step": 706 + }, + { + "epoch": 0.3152028533214445, + "grad_norm": 0.22314177453517914, + "learning_rate": 7.164698566168393e-05, + "loss": 1.7262, + "step": 707 + }, + { + "epoch": 0.3156486847971467, + "grad_norm": 0.2206079065799713, + "learning_rate": 7.16073896524697e-05, + "loss": 1.7779, + "step": 708 + }, + { + "epoch": 0.31609451627284885, + "grad_norm": 0.20930807292461395, + "learning_rate": 7.156771101231798e-05, + "loss": 1.7041, + "step": 709 + }, + { + "epoch": 0.31654034774855105, + 
"grad_norm": 0.21127712726593018, + "learning_rate": 7.152794984496034e-05, + "loss": 1.7807, + "step": 710 + }, + { + "epoch": 0.3169861792242532, + "grad_norm": 0.19875945150852203, + "learning_rate": 7.148810625434406e-05, + "loss": 1.7438, + "step": 711 + }, + { + "epoch": 0.3174320106999554, + "grad_norm": 0.20444701611995697, + "learning_rate": 7.14481803446319e-05, + "loss": 1.7587, + "step": 712 + }, + { + "epoch": 0.3178778421756576, + "grad_norm": 0.19894717633724213, + "learning_rate": 7.140817222020186e-05, + "loss": 1.7047, + "step": 713 + }, + { + "epoch": 0.3183236736513598, + "grad_norm": 0.20163820683956146, + "learning_rate": 7.136808198564683e-05, + "loss": 1.6941, + "step": 714 + }, + { + "epoch": 0.318769505127062, + "grad_norm": 0.20997068285942078, + "learning_rate": 7.132790974577438e-05, + "loss": 1.7468, + "step": 715 + }, + { + "epoch": 0.31921533660276413, + "grad_norm": 0.1935359686613083, + "learning_rate": 7.128765560560646e-05, + "loss": 1.7119, + "step": 716 + }, + { + "epoch": 0.31966116807846634, + "grad_norm": 0.20880842208862305, + "learning_rate": 7.124731967037916e-05, + "loss": 1.6706, + "step": 717 + }, + { + "epoch": 0.32010699955416855, + "grad_norm": 0.21494819223880768, + "learning_rate": 7.120690204554236e-05, + "loss": 1.774, + "step": 718 + }, + { + "epoch": 0.3205528310298707, + "grad_norm": 0.2001909464597702, + "learning_rate": 7.116640283675952e-05, + "loss": 1.6931, + "step": 719 + }, + { + "epoch": 0.3209986625055729, + "grad_norm": 0.20694108307361603, + "learning_rate": 7.112582214990739e-05, + "loss": 1.7273, + "step": 720 + }, + { + "epoch": 0.32144449398127506, + "grad_norm": 0.19691170752048492, + "learning_rate": 7.108516009107573e-05, + "loss": 1.6678, + "step": 721 + }, + { + "epoch": 0.32189032545697727, + "grad_norm": 0.21288730204105377, + "learning_rate": 7.104441676656704e-05, + "loss": 1.7421, + "step": 722 + }, + { + "epoch": 0.3223361569326795, + "grad_norm": 0.2045082151889801, + "learning_rate": 7.100359228289623e-05, + "loss": 1.7416, + "step": 723 + }, + { + "epoch": 0.3227819884083816, + "grad_norm": 0.23858684301376343, + "learning_rate": 7.096268674679042e-05, + "loss": 1.816, + "step": 724 + }, + { + "epoch": 0.32322781988408383, + "grad_norm": 0.20045718550682068, + "learning_rate": 7.092170026518863e-05, + "loss": 1.7845, + "step": 725 + }, + { + "epoch": 0.323673651359786, + "grad_norm": 0.22614650428295135, + "learning_rate": 7.088063294524144e-05, + "loss": 1.7489, + "step": 726 + }, + { + "epoch": 0.3241194828354882, + "grad_norm": 0.2049480676651001, + "learning_rate": 7.083948489431085e-05, + "loss": 1.6892, + "step": 727 + }, + { + "epoch": 0.32456531431119034, + "grad_norm": 0.20532821118831635, + "learning_rate": 7.079825621996984e-05, + "loss": 1.7088, + "step": 728 + }, + { + "epoch": 0.32501114578689255, + "grad_norm": 0.20658715069293976, + "learning_rate": 7.07569470300022e-05, + "loss": 1.6465, + "step": 729 + }, + { + "epoch": 0.32545697726259476, + "grad_norm": 0.21333490312099457, + "learning_rate": 7.07155574324022e-05, + "loss": 1.8009, + "step": 730 + }, + { + "epoch": 0.3259028087382969, + "grad_norm": 0.19863495230674744, + "learning_rate": 7.067408753537434e-05, + "loss": 1.6996, + "step": 731 + }, + { + "epoch": 0.3263486402139991, + "grad_norm": 0.19713807106018066, + "learning_rate": 7.063253744733301e-05, + "loss": 1.7072, + "step": 732 + }, + { + "epoch": 0.32679447168970127, + "grad_norm": 0.21338967978954315, + "learning_rate": 7.059090727690227e-05, + "loss": 1.739, + "step": 
733 + }, + { + "epoch": 0.3272403031654035, + "grad_norm": 0.2086186707019806, + "learning_rate": 7.054919713291555e-05, + "loss": 1.7702, + "step": 734 + }, + { + "epoch": 0.3276861346411057, + "grad_norm": 0.20826803147792816, + "learning_rate": 7.050740712441528e-05, + "loss": 1.8603, + "step": 735 + }, + { + "epoch": 0.32813196611680784, + "grad_norm": 0.1978282779455185, + "learning_rate": 7.046553736065278e-05, + "loss": 1.6992, + "step": 736 + }, + { + "epoch": 0.32857779759251005, + "grad_norm": 0.21559976041316986, + "learning_rate": 7.042358795108783e-05, + "loss": 1.7706, + "step": 737 + }, + { + "epoch": 0.3290236290682122, + "grad_norm": 0.20173758268356323, + "learning_rate": 7.03815590053884e-05, + "loss": 1.7819, + "step": 738 + }, + { + "epoch": 0.3294694605439144, + "grad_norm": 0.20154009759426117, + "learning_rate": 7.033945063343041e-05, + "loss": 1.6949, + "step": 739 + }, + { + "epoch": 0.32991529201961656, + "grad_norm": 0.22486606240272522, + "learning_rate": 7.029726294529747e-05, + "loss": 1.7281, + "step": 740 + }, + { + "epoch": 0.33036112349531876, + "grad_norm": 0.2042747586965561, + "learning_rate": 7.025499605128046e-05, + "loss": 1.6134, + "step": 741 + }, + { + "epoch": 0.330806954971021, + "grad_norm": 0.23044522106647491, + "learning_rate": 7.021265006187739e-05, + "loss": 1.7322, + "step": 742 + }, + { + "epoch": 0.3312527864467231, + "grad_norm": 0.20150107145309448, + "learning_rate": 7.017022508779304e-05, + "loss": 1.6962, + "step": 743 + }, + { + "epoch": 0.33169861792242533, + "grad_norm": 0.23153840005397797, + "learning_rate": 7.012772123993865e-05, + "loss": 1.7392, + "step": 744 + }, + { + "epoch": 0.3321444493981275, + "grad_norm": 0.22877974808216095, + "learning_rate": 7.008513862943168e-05, + "loss": 1.7461, + "step": 745 + }, + { + "epoch": 0.3325902808738297, + "grad_norm": 0.22845101356506348, + "learning_rate": 7.004247736759552e-05, + "loss": 1.7318, + "step": 746 + }, + { + "epoch": 0.3330361123495319, + "grad_norm": 0.2245214730501175, + "learning_rate": 6.999973756595912e-05, + "loss": 1.7178, + "step": 747 + }, + { + "epoch": 0.33348194382523405, + "grad_norm": 0.21158166229724884, + "learning_rate": 6.995691933625683e-05, + "loss": 1.7885, + "step": 748 + }, + { + "epoch": 0.33392777530093626, + "grad_norm": 0.22586104273796082, + "learning_rate": 6.991402279042796e-05, + "loss": 1.8011, + "step": 749 + }, + { + "epoch": 0.3343736067766384, + "grad_norm": 0.2159549593925476, + "learning_rate": 6.987104804061659e-05, + "loss": 1.7776, + "step": 750 + }, + { + "epoch": 0.3348194382523406, + "grad_norm": 0.26645052433013916, + "learning_rate": 6.982799519917129e-05, + "loss": 1.7894, + "step": 751 + }, + { + "epoch": 0.3352652697280428, + "grad_norm": 0.22301529347896576, + "learning_rate": 6.978486437864473e-05, + "loss": 1.8246, + "step": 752 + }, + { + "epoch": 0.335711101203745, + "grad_norm": 0.2200409322977066, + "learning_rate": 6.974165569179343e-05, + "loss": 1.7634, + "step": 753 + }, + { + "epoch": 0.3361569326794472, + "grad_norm": 0.24905377626419067, + "learning_rate": 6.969836925157757e-05, + "loss": 1.7187, + "step": 754 + }, + { + "epoch": 0.33660276415514934, + "grad_norm": 0.20776088535785675, + "learning_rate": 6.965500517116045e-05, + "loss": 1.7115, + "step": 755 + }, + { + "epoch": 0.33704859563085154, + "grad_norm": 0.22392477095127106, + "learning_rate": 6.961156356390848e-05, + "loss": 1.8265, + "step": 756 + }, + { + "epoch": 0.3374944271065537, + "grad_norm": 0.23375096917152405, + "learning_rate": 
6.956804454339069e-05, + "loss": 1.7948, + "step": 757 + }, + { + "epoch": 0.3379402585822559, + "grad_norm": 0.20256270468235016, + "learning_rate": 6.952444822337848e-05, + "loss": 1.69, + "step": 758 + }, + { + "epoch": 0.3383860900579581, + "grad_norm": 0.20559729635715485, + "learning_rate": 6.948077471784536e-05, + "loss": 1.6454, + "step": 759 + }, + { + "epoch": 0.33883192153366026, + "grad_norm": 0.23576031625270844, + "learning_rate": 6.94370241409666e-05, + "loss": 1.7647, + "step": 760 + }, + { + "epoch": 0.33927775300936247, + "grad_norm": 0.20360809564590454, + "learning_rate": 6.939319660711899e-05, + "loss": 1.6665, + "step": 761 + }, + { + "epoch": 0.3397235844850646, + "grad_norm": 0.21529042720794678, + "learning_rate": 6.934929223088049e-05, + "loss": 1.805, + "step": 762 + }, + { + "epoch": 0.34016941596076683, + "grad_norm": 0.22950305044651031, + "learning_rate": 6.930531112702993e-05, + "loss": 1.6865, + "step": 763 + }, + { + "epoch": 0.34061524743646904, + "grad_norm": 0.2098643183708191, + "learning_rate": 6.926125341054676e-05, + "loss": 1.7414, + "step": 764 + }, + { + "epoch": 0.3410610789121712, + "grad_norm": 0.2188938707113266, + "learning_rate": 6.921711919661071e-05, + "loss": 1.7158, + "step": 765 + }, + { + "epoch": 0.3415069103878734, + "grad_norm": 0.2061680257320404, + "learning_rate": 6.917290860060146e-05, + "loss": 1.6428, + "step": 766 + }, + { + "epoch": 0.34195274186357555, + "grad_norm": 0.2051788568496704, + "learning_rate": 6.912862173809844e-05, + "loss": 1.7216, + "step": 767 + }, + { + "epoch": 0.34239857333927776, + "grad_norm": 0.22325345873832703, + "learning_rate": 6.908425872488042e-05, + "loss": 1.7829, + "step": 768 + }, + { + "epoch": 0.34284440481497996, + "grad_norm": 0.2041957527399063, + "learning_rate": 6.903981967692524e-05, + "loss": 1.7991, + "step": 769 + }, + { + "epoch": 0.3432902362906821, + "grad_norm": 0.22522366046905518, + "learning_rate": 6.899530471040956e-05, + "loss": 1.6949, + "step": 770 + }, + { + "epoch": 0.3437360677663843, + "grad_norm": 0.21507549285888672, + "learning_rate": 6.895071394170847e-05, + "loss": 1.7102, + "step": 771 + }, + { + "epoch": 0.3441818992420865, + "grad_norm": 0.2359284907579422, + "learning_rate": 6.890604748739526e-05, + "loss": 1.7716, + "step": 772 + }, + { + "epoch": 0.3446277307177887, + "grad_norm": 0.2056940346956253, + "learning_rate": 6.886130546424106e-05, + "loss": 1.7614, + "step": 773 + }, + { + "epoch": 0.34507356219349083, + "grad_norm": 0.20665904879570007, + "learning_rate": 6.88164879892146e-05, + "loss": 1.7882, + "step": 774 + }, + { + "epoch": 0.34551939366919304, + "grad_norm": 0.21209891140460968, + "learning_rate": 6.87715951794818e-05, + "loss": 1.7265, + "step": 775 + }, + { + "epoch": 0.34596522514489525, + "grad_norm": 0.19885431230068207, + "learning_rate": 6.872662715240558e-05, + "loss": 1.6964, + "step": 776 + }, + { + "epoch": 0.3464110566205974, + "grad_norm": 0.2021333873271942, + "learning_rate": 6.868158402554546e-05, + "loss": 1.7327, + "step": 777 + }, + { + "epoch": 0.3468568880962996, + "grad_norm": 0.20405402779579163, + "learning_rate": 6.863646591665734e-05, + "loss": 1.733, + "step": 778 + }, + { + "epoch": 0.34730271957200176, + "grad_norm": 0.21486836671829224, + "learning_rate": 6.859127294369313e-05, + "loss": 1.8163, + "step": 779 + }, + { + "epoch": 0.34774855104770397, + "grad_norm": 0.2023477703332901, + "learning_rate": 6.85460052248004e-05, + "loss": 1.7342, + "step": 780 + }, + { + "epoch": 0.3481943825234062, + "grad_norm": 
0.21515972912311554, + "learning_rate": 6.85006628783222e-05, + "loss": 1.7713, + "step": 781 + }, + { + "epoch": 0.34864021399910833, + "grad_norm": 0.20617273449897766, + "learning_rate": 6.845524602279668e-05, + "loss": 1.7489, + "step": 782 + }, + { + "epoch": 0.34908604547481054, + "grad_norm": 0.19855135679244995, + "learning_rate": 6.840975477695669e-05, + "loss": 1.7432, + "step": 783 + }, + { + "epoch": 0.3495318769505127, + "grad_norm": 0.21225327253341675, + "learning_rate": 6.836418925972967e-05, + "loss": 1.7179, + "step": 784 + }, + { + "epoch": 0.3499777084262149, + "grad_norm": 0.20852187275886536, + "learning_rate": 6.831854959023714e-05, + "loss": 1.6805, + "step": 785 + }, + { + "epoch": 0.3504235399019171, + "grad_norm": 0.2072257697582245, + "learning_rate": 6.827283588779451e-05, + "loss": 1.7291, + "step": 786 + }, + { + "epoch": 0.35086937137761925, + "grad_norm": 0.19819296896457672, + "learning_rate": 6.822704827191078e-05, + "loss": 1.6697, + "step": 787 + }, + { + "epoch": 0.35131520285332146, + "grad_norm": 0.22480206191539764, + "learning_rate": 6.81811868622881e-05, + "loss": 1.8037, + "step": 788 + }, + { + "epoch": 0.3517610343290236, + "grad_norm": 0.20416642725467682, + "learning_rate": 6.813525177882156e-05, + "loss": 1.7069, + "step": 789 + }, + { + "epoch": 0.3522068658047258, + "grad_norm": 0.2089570313692093, + "learning_rate": 6.808924314159889e-05, + "loss": 1.7296, + "step": 790 + }, + { + "epoch": 0.352652697280428, + "grad_norm": 0.2162252515554428, + "learning_rate": 6.804316107090009e-05, + "loss": 1.719, + "step": 791 + }, + { + "epoch": 0.3530985287561302, + "grad_norm": 0.21270179748535156, + "learning_rate": 6.799700568719714e-05, + "loss": 1.7671, + "step": 792 + }, + { + "epoch": 0.3535443602318324, + "grad_norm": 0.20561178028583527, + "learning_rate": 6.795077711115368e-05, + "loss": 1.6969, + "step": 793 + }, + { + "epoch": 0.35399019170753454, + "grad_norm": 0.19621388614177704, + "learning_rate": 6.790447546362467e-05, + "loss": 1.6404, + "step": 794 + }, + { + "epoch": 0.35443602318323675, + "grad_norm": 0.21053430438041687, + "learning_rate": 6.785810086565613e-05, + "loss": 1.7553, + "step": 795 + }, + { + "epoch": 0.3548818546589389, + "grad_norm": 0.20376941561698914, + "learning_rate": 6.781165343848482e-05, + "loss": 1.7597, + "step": 796 + }, + { + "epoch": 0.3553276861346411, + "grad_norm": 0.20846118032932281, + "learning_rate": 6.776513330353783e-05, + "loss": 1.7618, + "step": 797 + }, + { + "epoch": 0.3557735176103433, + "grad_norm": 0.21923816204071045, + "learning_rate": 6.771854058243239e-05, + "loss": 1.777, + "step": 798 + }, + { + "epoch": 0.35621934908604547, + "grad_norm": 0.2055412381887436, + "learning_rate": 6.767187539697544e-05, + "loss": 1.706, + "step": 799 + }, + { + "epoch": 0.3566651805617477, + "grad_norm": 0.21490585803985596, + "learning_rate": 6.762513786916341e-05, + "loss": 1.7621, + "step": 800 + }, + { + "epoch": 0.3571110120374498, + "grad_norm": 0.21446526050567627, + "learning_rate": 6.757832812118183e-05, + "loss": 1.6544, + "step": 801 + }, + { + "epoch": 0.35755684351315203, + "grad_norm": 0.20049992203712463, + "learning_rate": 6.753144627540503e-05, + "loss": 1.6754, + "step": 802 + }, + { + "epoch": 0.3580026749888542, + "grad_norm": 0.2026570439338684, + "learning_rate": 6.748449245439582e-05, + "loss": 1.6972, + "step": 803 + }, + { + "epoch": 0.3584485064645564, + "grad_norm": 0.2295822948217392, + "learning_rate": 6.743746678090521e-05, + "loss": 1.7813, + "step": 804 + }, + { + 
"epoch": 0.3588943379402586, + "grad_norm": 0.20751531422138214, + "learning_rate": 6.7390369377872e-05, + "loss": 1.6808, + "step": 805 + }, + { + "epoch": 0.35934016941596075, + "grad_norm": 0.2165253609418869, + "learning_rate": 6.734320036842259e-05, + "loss": 1.7764, + "step": 806 + }, + { + "epoch": 0.35978600089166296, + "grad_norm": 0.20914991199970245, + "learning_rate": 6.729595987587048e-05, + "loss": 1.7806, + "step": 807 + }, + { + "epoch": 0.3602318323673651, + "grad_norm": 0.21666984260082245, + "learning_rate": 6.724864802371613e-05, + "loss": 1.7888, + "step": 808 + }, + { + "epoch": 0.3606776638430673, + "grad_norm": 0.23241227865219116, + "learning_rate": 6.720126493564651e-05, + "loss": 1.8059, + "step": 809 + }, + { + "epoch": 0.3611234953187695, + "grad_norm": 0.20020677149295807, + "learning_rate": 6.715381073553484e-05, + "loss": 1.6864, + "step": 810 + }, + { + "epoch": 0.3615693267944717, + "grad_norm": 0.20438076555728912, + "learning_rate": 6.710628554744024e-05, + "loss": 1.6708, + "step": 811 + }, + { + "epoch": 0.3620151582701739, + "grad_norm": 0.21755672991275787, + "learning_rate": 6.705868949560743e-05, + "loss": 1.7963, + "step": 812 + }, + { + "epoch": 0.36246098974587604, + "grad_norm": 0.2186664640903473, + "learning_rate": 6.701102270446634e-05, + "loss": 1.7591, + "step": 813 + }, + { + "epoch": 0.36290682122157825, + "grad_norm": 0.2028367966413498, + "learning_rate": 6.696328529863193e-05, + "loss": 1.7197, + "step": 814 + }, + { + "epoch": 0.36335265269728045, + "grad_norm": 0.2205536663532257, + "learning_rate": 6.691547740290365e-05, + "loss": 1.6579, + "step": 815 + }, + { + "epoch": 0.3637984841729826, + "grad_norm": 0.19804279506206512, + "learning_rate": 6.68675991422653e-05, + "loss": 1.669, + "step": 816 + }, + { + "epoch": 0.3642443156486848, + "grad_norm": 0.21472683548927307, + "learning_rate": 6.681965064188463e-05, + "loss": 1.7421, + "step": 817 + }, + { + "epoch": 0.36469014712438697, + "grad_norm": 0.20572058856487274, + "learning_rate": 6.6771632027113e-05, + "loss": 1.6647, + "step": 818 + }, + { + "epoch": 0.3651359786000892, + "grad_norm": 0.21703673899173737, + "learning_rate": 6.672354342348508e-05, + "loss": 1.7923, + "step": 819 + }, + { + "epoch": 0.3655818100757913, + "grad_norm": 0.21246255934238434, + "learning_rate": 6.667538495671851e-05, + "loss": 1.6859, + "step": 820 + }, + { + "epoch": 0.36602764155149353, + "grad_norm": 0.19298550486564636, + "learning_rate": 6.662715675271358e-05, + "loss": 1.6656, + "step": 821 + }, + { + "epoch": 0.36647347302719574, + "grad_norm": 0.21339663863182068, + "learning_rate": 6.657885893755288e-05, + "loss": 1.7414, + "step": 822 + }, + { + "epoch": 0.3669193045028979, + "grad_norm": 0.21220877766609192, + "learning_rate": 6.653049163750098e-05, + "loss": 1.6884, + "step": 823 + }, + { + "epoch": 0.3673651359786001, + "grad_norm": 0.21043981611728668, + "learning_rate": 6.648205497900412e-05, + "loss": 1.7189, + "step": 824 + }, + { + "epoch": 0.36781096745430225, + "grad_norm": 0.2267450988292694, + "learning_rate": 6.643354908868985e-05, + "loss": 1.7781, + "step": 825 + }, + { + "epoch": 0.36825679893000446, + "grad_norm": 0.20768927037715912, + "learning_rate": 6.638497409336671e-05, + "loss": 1.6851, + "step": 826 + }, + { + "epoch": 0.36870263040570667, + "grad_norm": 0.20903393626213074, + "learning_rate": 6.633633012002392e-05, + "loss": 1.693, + "step": 827 + }, + { + "epoch": 0.3691484618814088, + "grad_norm": 0.21910394728183746, + "learning_rate": 
6.628761729583104e-05, + "loss": 1.7055, + "step": 828 + }, + { + "epoch": 0.369594293357111, + "grad_norm": 0.19814462959766388, + "learning_rate": 6.623883574813758e-05, + "loss": 1.6605, + "step": 829 + }, + { + "epoch": 0.3700401248328132, + "grad_norm": 0.21070930361747742, + "learning_rate": 6.618998560447272e-05, + "loss": 1.6962, + "step": 830 + }, + { + "epoch": 0.3704859563085154, + "grad_norm": 0.20243875682353973, + "learning_rate": 6.614106699254502e-05, + "loss": 1.6691, + "step": 831 + }, + { + "epoch": 0.3709317877842176, + "grad_norm": 0.19810143113136292, + "learning_rate": 6.609208004024199e-05, + "loss": 1.6673, + "step": 832 + }, + { + "epoch": 0.37137761925991974, + "grad_norm": 0.2071191817522049, + "learning_rate": 6.604302487562981e-05, + "loss": 1.6635, + "step": 833 + }, + { + "epoch": 0.37182345073562195, + "grad_norm": 0.2069198191165924, + "learning_rate": 6.599390162695299e-05, + "loss": 1.8186, + "step": 834 + }, + { + "epoch": 0.3722692822113241, + "grad_norm": 0.20763137936592102, + "learning_rate": 6.594471042263404e-05, + "loss": 1.7275, + "step": 835 + }, + { + "epoch": 0.3727151136870263, + "grad_norm": 0.2052050232887268, + "learning_rate": 6.589545139127312e-05, + "loss": 1.6643, + "step": 836 + }, + { + "epoch": 0.37316094516272846, + "grad_norm": 0.21433968842029572, + "learning_rate": 6.58461246616477e-05, + "loss": 1.7497, + "step": 837 + }, + { + "epoch": 0.37360677663843067, + "grad_norm": 0.2157992571592331, + "learning_rate": 6.579673036271224e-05, + "loss": 1.7057, + "step": 838 + }, + { + "epoch": 0.3740526081141329, + "grad_norm": 0.20648705959320068, + "learning_rate": 6.574726862359785e-05, + "loss": 1.6205, + "step": 839 + }, + { + "epoch": 0.37449843958983503, + "grad_norm": 0.20061194896697998, + "learning_rate": 6.569773957361195e-05, + "loss": 1.6565, + "step": 840 + }, + { + "epoch": 0.37494427106553724, + "grad_norm": 0.21495498716831207, + "learning_rate": 6.564814334223793e-05, + "loss": 1.6354, + "step": 841 + }, + { + "epoch": 0.3753901025412394, + "grad_norm": 0.21777182817459106, + "learning_rate": 6.559848005913481e-05, + "loss": 1.7688, + "step": 842 + }, + { + "epoch": 0.3758359340169416, + "grad_norm": 0.22829119861125946, + "learning_rate": 6.554874985413689e-05, + "loss": 1.7521, + "step": 843 + }, + { + "epoch": 0.3762817654926438, + "grad_norm": 0.20866426825523376, + "learning_rate": 6.549895285725342e-05, + "loss": 1.6905, + "step": 844 + }, + { + "epoch": 0.37672759696834596, + "grad_norm": 0.22749756276607513, + "learning_rate": 6.544908919866831e-05, + "loss": 1.7736, + "step": 845 + }, + { + "epoch": 0.37717342844404816, + "grad_norm": 0.23048724234104156, + "learning_rate": 6.539915900873969e-05, + "loss": 1.7949, + "step": 846 + }, + { + "epoch": 0.3776192599197503, + "grad_norm": 0.21478885412216187, + "learning_rate": 6.534916241799965e-05, + "loss": 1.7441, + "step": 847 + }, + { + "epoch": 0.3780650913954525, + "grad_norm": 0.23727373778820038, + "learning_rate": 6.529909955715385e-05, + "loss": 1.7407, + "step": 848 + }, + { + "epoch": 0.3785109228711547, + "grad_norm": 0.21736378967761993, + "learning_rate": 6.524897055708123e-05, + "loss": 1.7306, + "step": 849 + }, + { + "epoch": 0.3789567543468569, + "grad_norm": 0.20656636357307434, + "learning_rate": 6.519877554883362e-05, + "loss": 1.7487, + "step": 850 + }, + { + "epoch": 0.3794025858225591, + "grad_norm": 0.21003365516662598, + "learning_rate": 6.51485146636354e-05, + "loss": 1.6466, + "step": 851 + }, + { + "epoch": 0.37984841729826124, + 
"grad_norm": 0.21377114951610565, + "learning_rate": 6.509818803288322e-05, + "loss": 1.7776, + "step": 852 + }, + { + "epoch": 0.38029424877396345, + "grad_norm": 0.2006235122680664, + "learning_rate": 6.504779578814552e-05, + "loss": 1.7191, + "step": 853 + }, + { + "epoch": 0.3807400802496656, + "grad_norm": 0.2083541452884674, + "learning_rate": 6.499733806116237e-05, + "loss": 1.7004, + "step": 854 + }, + { + "epoch": 0.3811859117253678, + "grad_norm": 0.20760244131088257, + "learning_rate": 6.494681498384497e-05, + "loss": 1.7231, + "step": 855 + }, + { + "epoch": 0.38163174320107, + "grad_norm": 0.21493734419345856, + "learning_rate": 6.48962266882754e-05, + "loss": 1.8476, + "step": 856 + }, + { + "epoch": 0.38207757467677217, + "grad_norm": 0.2154144048690796, + "learning_rate": 6.484557330670623e-05, + "loss": 1.7502, + "step": 857 + }, + { + "epoch": 0.3825234061524744, + "grad_norm": 0.21206477284431458, + "learning_rate": 6.479485497156015e-05, + "loss": 1.779, + "step": 858 + }, + { + "epoch": 0.38296923762817653, + "grad_norm": 0.20155435800552368, + "learning_rate": 6.474407181542969e-05, + "loss": 1.7466, + "step": 859 + }, + { + "epoch": 0.38341506910387874, + "grad_norm": 0.212468221783638, + "learning_rate": 6.469322397107685e-05, + "loss": 1.7478, + "step": 860 + }, + { + "epoch": 0.38386090057958094, + "grad_norm": 0.21040287613868713, + "learning_rate": 6.464231157143271e-05, + "loss": 1.7352, + "step": 861 + }, + { + "epoch": 0.3843067320552831, + "grad_norm": 0.2113109678030014, + "learning_rate": 6.459133474959715e-05, + "loss": 1.7363, + "step": 862 + }, + { + "epoch": 0.3847525635309853, + "grad_norm": 0.20767450332641602, + "learning_rate": 6.454029363883845e-05, + "loss": 1.7709, + "step": 863 + }, + { + "epoch": 0.38519839500668746, + "grad_norm": 0.20580929517745972, + "learning_rate": 6.448918837259297e-05, + "loss": 1.6631, + "step": 864 + }, + { + "epoch": 0.38564422648238966, + "grad_norm": 0.20532219111919403, + "learning_rate": 6.443801908446477e-05, + "loss": 1.6743, + "step": 865 + }, + { + "epoch": 0.3860900579580918, + "grad_norm": 0.20357473194599152, + "learning_rate": 6.43867859082253e-05, + "loss": 1.7709, + "step": 866 + }, + { + "epoch": 0.386535889433794, + "grad_norm": 0.20754805207252502, + "learning_rate": 6.433548897781303e-05, + "loss": 1.7029, + "step": 867 + }, + { + "epoch": 0.38698172090949623, + "grad_norm": 0.20898325741291046, + "learning_rate": 6.428412842733312e-05, + "loss": 1.7979, + "step": 868 + }, + { + "epoch": 0.3874275523851984, + "grad_norm": 0.2059037685394287, + "learning_rate": 6.423270439105702e-05, + "loss": 1.789, + "step": 869 + }, + { + "epoch": 0.3878733838609006, + "grad_norm": 0.2053404450416565, + "learning_rate": 6.418121700342214e-05, + "loss": 1.6802, + "step": 870 + }, + { + "epoch": 0.38831921533660274, + "grad_norm": 0.20702195167541504, + "learning_rate": 6.412966639903157e-05, + "loss": 1.7664, + "step": 871 + }, + { + "epoch": 0.38876504681230495, + "grad_norm": 0.20646905899047852, + "learning_rate": 6.407805271265361e-05, + "loss": 1.7266, + "step": 872 + }, + { + "epoch": 0.38921087828800716, + "grad_norm": 0.21376605331897736, + "learning_rate": 6.40263760792215e-05, + "loss": 1.6608, + "step": 873 + }, + { + "epoch": 0.3896567097637093, + "grad_norm": 0.20318657159805298, + "learning_rate": 6.397463663383302e-05, + "loss": 1.719, + "step": 874 + }, + { + "epoch": 0.3901025412394115, + "grad_norm": 0.20452015101909637, + "learning_rate": 6.392283451175019e-05, + "loss": 1.7949, + "step": 875 + 
}, + { + "epoch": 0.39054837271511367, + "grad_norm": 0.21763859689235687, + "learning_rate": 6.387096984839887e-05, + "loss": 1.6594, + "step": 876 + }, + { + "epoch": 0.3909942041908159, + "grad_norm": 0.21744206547737122, + "learning_rate": 6.38190427793684e-05, + "loss": 1.8175, + "step": 877 + }, + { + "epoch": 0.3914400356665181, + "grad_norm": 0.20281130075454712, + "learning_rate": 6.376705344041132e-05, + "loss": 1.7183, + "step": 878 + }, + { + "epoch": 0.39188586714222023, + "grad_norm": 0.21118947863578796, + "learning_rate": 6.371500196744291e-05, + "loss": 1.6888, + "step": 879 + }, + { + "epoch": 0.39233169861792244, + "grad_norm": 0.22375762462615967, + "learning_rate": 6.366288849654091e-05, + "loss": 1.7068, + "step": 880 + }, + { + "epoch": 0.3927775300936246, + "grad_norm": 0.23550164699554443, + "learning_rate": 6.361071316394512e-05, + "loss": 1.7814, + "step": 881 + }, + { + "epoch": 0.3932233615693268, + "grad_norm": 0.2262086272239685, + "learning_rate": 6.355847610605709e-05, + "loss": 1.6457, + "step": 882 + }, + { + "epoch": 0.39366919304502895, + "grad_norm": 0.21293623745441437, + "learning_rate": 6.350617745943975e-05, + "loss": 1.7592, + "step": 883 + }, + { + "epoch": 0.39411502452073116, + "grad_norm": 0.23403780162334442, + "learning_rate": 6.345381736081699e-05, + "loss": 1.7122, + "step": 884 + }, + { + "epoch": 0.39456085599643337, + "grad_norm": 0.22974692285060883, + "learning_rate": 6.34013959470734e-05, + "loss": 1.663, + "step": 885 + }, + { + "epoch": 0.3950066874721355, + "grad_norm": 0.20133690536022186, + "learning_rate": 6.334891335525385e-05, + "loss": 1.684, + "step": 886 + }, + { + "epoch": 0.39545251894783773, + "grad_norm": 0.22060684859752655, + "learning_rate": 6.329636972256314e-05, + "loss": 1.7129, + "step": 887 + }, + { + "epoch": 0.3958983504235399, + "grad_norm": 0.22694548964500427, + "learning_rate": 6.324376518636562e-05, + "loss": 1.7973, + "step": 888 + }, + { + "epoch": 0.3963441818992421, + "grad_norm": 0.20457105338573456, + "learning_rate": 6.319109988418497e-05, + "loss": 1.7433, + "step": 889 + }, + { + "epoch": 0.3967900133749443, + "grad_norm": 0.2157946079969406, + "learning_rate": 6.313837395370359e-05, + "loss": 1.7765, + "step": 890 + }, + { + "epoch": 0.39723584485064645, + "grad_norm": 0.20611245930194855, + "learning_rate": 6.308558753276248e-05, + "loss": 1.744, + "step": 891 + }, + { + "epoch": 0.39768167632634865, + "grad_norm": 0.20639915764331818, + "learning_rate": 6.303274075936073e-05, + "loss": 1.7489, + "step": 892 + }, + { + "epoch": 0.3981275078020508, + "grad_norm": 0.2307731956243515, + "learning_rate": 6.297983377165521e-05, + "loss": 1.7695, + "step": 893 + }, + { + "epoch": 0.398573339277753, + "grad_norm": 0.20785124599933624, + "learning_rate": 6.292686670796023e-05, + "loss": 1.7588, + "step": 894 + }, + { + "epoch": 0.3990191707534552, + "grad_norm": 0.20284093916416168, + "learning_rate": 6.287383970674716e-05, + "loss": 1.7078, + "step": 895 + }, + { + "epoch": 0.3994650022291574, + "grad_norm": 0.20751845836639404, + "learning_rate": 6.282075290664405e-05, + "loss": 1.7742, + "step": 896 + }, + { + "epoch": 0.3999108337048596, + "grad_norm": 0.21337181329727173, + "learning_rate": 6.276760644643528e-05, + "loss": 1.7026, + "step": 897 + }, + { + "epoch": 0.40035666518056173, + "grad_norm": 0.20509934425354004, + "learning_rate": 6.271440046506117e-05, + "loss": 1.7236, + "step": 898 + }, + { + "epoch": 0.40080249665626394, + "grad_norm": 0.20551203191280365, + "learning_rate": 
6.266113510161772e-05, + "loss": 1.7098, + "step": 899 + }, + { + "epoch": 0.4012483281319661, + "grad_norm": 0.20775841176509857, + "learning_rate": 6.260781049535611e-05, + "loss": 1.7432, + "step": 900 + }, + { + "epoch": 0.4016941596076683, + "grad_norm": 0.19963513314723969, + "learning_rate": 6.255442678568241e-05, + "loss": 1.6516, + "step": 901 + }, + { + "epoch": 0.4021399910833705, + "grad_norm": 0.2177877277135849, + "learning_rate": 6.250098411215723e-05, + "loss": 1.7955, + "step": 902 + }, + { + "epoch": 0.40258582255907266, + "grad_norm": 0.2058558464050293, + "learning_rate": 6.24474826144953e-05, + "loss": 1.669, + "step": 903 + }, + { + "epoch": 0.40303165403477487, + "grad_norm": 0.21106205880641937, + "learning_rate": 6.239392243256514e-05, + "loss": 1.6558, + "step": 904 + }, + { + "epoch": 0.403477485510477, + "grad_norm": 0.20492535829544067, + "learning_rate": 6.23403037063887e-05, + "loss": 1.704, + "step": 905 + }, + { + "epoch": 0.4039233169861792, + "grad_norm": 0.2018662542104721, + "learning_rate": 6.228662657614095e-05, + "loss": 1.723, + "step": 906 + }, + { + "epoch": 0.40436914846188143, + "grad_norm": 0.21367274224758148, + "learning_rate": 6.223289118214961e-05, + "loss": 1.8245, + "step": 907 + }, + { + "epoch": 0.4048149799375836, + "grad_norm": 0.25061705708503723, + "learning_rate": 6.217909766489463e-05, + "loss": 1.7306, + "step": 908 + }, + { + "epoch": 0.4052608114132858, + "grad_norm": 0.21725088357925415, + "learning_rate": 6.212524616500798e-05, + "loss": 1.7047, + "step": 909 + }, + { + "epoch": 0.40570664288898795, + "grad_norm": 0.20925213396549225, + "learning_rate": 6.20713368232732e-05, + "loss": 1.6556, + "step": 910 + }, + { + "epoch": 0.40615247436469015, + "grad_norm": 0.20636452734470367, + "learning_rate": 6.201736978062501e-05, + "loss": 1.7084, + "step": 911 + }, + { + "epoch": 0.4065983058403923, + "grad_norm": 0.23516716063022614, + "learning_rate": 6.196334517814903e-05, + "loss": 1.7051, + "step": 912 + }, + { + "epoch": 0.4070441373160945, + "grad_norm": 0.20950919389724731, + "learning_rate": 6.190926315708132e-05, + "loss": 1.6988, + "step": 913 + }, + { + "epoch": 0.4074899687917967, + "grad_norm": 0.2136993110179901, + "learning_rate": 6.185512385880807e-05, + "loss": 1.7573, + "step": 914 + }, + { + "epoch": 0.40793580026749887, + "grad_norm": 0.21377845108509064, + "learning_rate": 6.180092742486517e-05, + "loss": 1.6582, + "step": 915 + }, + { + "epoch": 0.4083816317432011, + "grad_norm": 0.21092340350151062, + "learning_rate": 6.174667399693795e-05, + "loss": 1.7061, + "step": 916 + }, + { + "epoch": 0.40882746321890323, + "grad_norm": 0.20376001298427582, + "learning_rate": 6.169236371686066e-05, + "loss": 1.663, + "step": 917 + }, + { + "epoch": 0.40927329469460544, + "grad_norm": 0.21954968571662903, + "learning_rate": 6.163799672661625e-05, + "loss": 1.7545, + "step": 918 + }, + { + "epoch": 0.40971912617030765, + "grad_norm": 0.2161523699760437, + "learning_rate": 6.158357316833587e-05, + "loss": 1.7652, + "step": 919 + }, + { + "epoch": 0.4101649576460098, + "grad_norm": 0.2133861929178238, + "learning_rate": 6.152909318429859e-05, + "loss": 1.839, + "step": 920 + }, + { + "epoch": 0.410610789121712, + "grad_norm": 0.22333158552646637, + "learning_rate": 6.147455691693098e-05, + "loss": 1.7119, + "step": 921 + }, + { + "epoch": 0.41105662059741416, + "grad_norm": 0.20326414704322815, + "learning_rate": 6.141996450880673e-05, + "loss": 1.6461, + "step": 922 + }, + { + "epoch": 0.41150245207311636, + "grad_norm": 
0.20836025476455688, + "learning_rate": 6.136531610264637e-05, + "loss": 1.7686, + "step": 923 + }, + { + "epoch": 0.4119482835488186, + "grad_norm": 0.20655208826065063, + "learning_rate": 6.131061184131673e-05, + "loss": 1.7342, + "step": 924 + }, + { + "epoch": 0.4123941150245207, + "grad_norm": 0.20534458756446838, + "learning_rate": 6.125585186783073e-05, + "loss": 1.7038, + "step": 925 + }, + { + "epoch": 0.41283994650022293, + "grad_norm": 0.20873938500881195, + "learning_rate": 6.120103632534693e-05, + "loss": 1.69, + "step": 926 + }, + { + "epoch": 0.4132857779759251, + "grad_norm": 0.2036725878715515, + "learning_rate": 6.11461653571691e-05, + "loss": 1.7053, + "step": 927 + }, + { + "epoch": 0.4137316094516273, + "grad_norm": 0.2182207852602005, + "learning_rate": 6.109123910674603e-05, + "loss": 1.7051, + "step": 928 + }, + { + "epoch": 0.41417744092732944, + "grad_norm": 0.20307707786560059, + "learning_rate": 6.10362577176709e-05, + "loss": 1.7081, + "step": 929 + }, + { + "epoch": 0.41462327240303165, + "grad_norm": 0.1967494785785675, + "learning_rate": 6.098122133368114e-05, + "loss": 1.7122, + "step": 930 + }, + { + "epoch": 0.41506910387873386, + "grad_norm": 0.22913050651550293, + "learning_rate": 6.0926130098657886e-05, + "loss": 1.6915, + "step": 931 + }, + { + "epoch": 0.415514935354436, + "grad_norm": 0.20961733162403107, + "learning_rate": 6.0870984156625706e-05, + "loss": 1.6768, + "step": 932 + }, + { + "epoch": 0.4159607668301382, + "grad_norm": 0.2018282413482666, + "learning_rate": 6.0815783651752186e-05, + "loss": 1.6413, + "step": 933 + }, + { + "epoch": 0.41640659830584037, + "grad_norm": 0.20871253311634064, + "learning_rate": 6.076052872834755e-05, + "loss": 1.6137, + "step": 934 + }, + { + "epoch": 0.4168524297815426, + "grad_norm": 0.21186941862106323, + "learning_rate": 6.0705219530864294e-05, + "loss": 1.6855, + "step": 935 + }, + { + "epoch": 0.4172982612572448, + "grad_norm": 0.21418844163417816, + "learning_rate": 6.064985620389678e-05, + "loss": 1.7997, + "step": 936 + }, + { + "epoch": 0.41774409273294694, + "grad_norm": 0.1978273093700409, + "learning_rate": 6.059443889218091e-05, + "loss": 1.6214, + "step": 937 + }, + { + "epoch": 0.41818992420864914, + "grad_norm": 0.21871232986450195, + "learning_rate": 6.053896774059368e-05, + "loss": 1.6506, + "step": 938 + }, + { + "epoch": 0.4186357556843513, + "grad_norm": 0.20808328688144684, + "learning_rate": 6.0483442894152895e-05, + "loss": 1.6899, + "step": 939 + }, + { + "epoch": 0.4190815871600535, + "grad_norm": 0.2087830752134323, + "learning_rate": 6.042786449801669e-05, + "loss": 1.7265, + "step": 940 + }, + { + "epoch": 0.4195274186357557, + "grad_norm": 0.21298988163471222, + "learning_rate": 6.0372232697483194e-05, + "loss": 1.728, + "step": 941 + }, + { + "epoch": 0.41997325011145786, + "grad_norm": 0.20861542224884033, + "learning_rate": 6.031654763799017e-05, + "loss": 1.7138, + "step": 942 + }, + { + "epoch": 0.42041908158716007, + "grad_norm": 0.21002022922039032, + "learning_rate": 6.026080946511463e-05, + "loss": 1.6858, + "step": 943 + }, + { + "epoch": 0.4208649130628622, + "grad_norm": 0.20414891839027405, + "learning_rate": 6.020501832457238e-05, + "loss": 1.6575, + "step": 944 + }, + { + "epoch": 0.42131074453856443, + "grad_norm": 0.21040669083595276, + "learning_rate": 6.014917436221777e-05, + "loss": 1.7012, + "step": 945 + }, + { + "epoch": 0.4217565760142666, + "grad_norm": 0.20173679292201996, + "learning_rate": 6.0093277724043194e-05, + "loss": 1.6422, + "step": 946 + }, 
+ { + "epoch": 0.4222024074899688, + "grad_norm": 0.21155527234077454, + "learning_rate": 6.0037328556178786e-05, + "loss": 1.7072, + "step": 947 + }, + { + "epoch": 0.422648238965671, + "grad_norm": 0.217556893825531, + "learning_rate": 5.998132700489197e-05, + "loss": 1.6765, + "step": 948 + }, + { + "epoch": 0.42309407044137315, + "grad_norm": 0.20571884512901306, + "learning_rate": 5.9925273216587144e-05, + "loss": 1.687, + "step": 949 + }, + { + "epoch": 0.42353990191707536, + "grad_norm": 0.21783576905727386, + "learning_rate": 5.9869167337805266e-05, + "loss": 1.7004, + "step": 950 + }, + { + "epoch": 0.4239857333927775, + "grad_norm": 0.2003163993358612, + "learning_rate": 5.981300951522349e-05, + "loss": 1.7159, + "step": 951 + }, + { + "epoch": 0.4244315648684797, + "grad_norm": 0.21480540931224823, + "learning_rate": 5.975679989565472e-05, + "loss": 1.7743, + "step": 952 + }, + { + "epoch": 0.4248773963441819, + "grad_norm": 0.21497845649719238, + "learning_rate": 5.970053862604732e-05, + "loss": 1.7432, + "step": 953 + }, + { + "epoch": 0.4253232278198841, + "grad_norm": 0.2040768414735794, + "learning_rate": 5.9644225853484644e-05, + "loss": 1.664, + "step": 954 + }, + { + "epoch": 0.4257690592955863, + "grad_norm": 0.20544065535068512, + "learning_rate": 5.958786172518473e-05, + "loss": 1.7423, + "step": 955 + }, + { + "epoch": 0.42621489077128843, + "grad_norm": 0.2030346691608429, + "learning_rate": 5.9531446388499816e-05, + "loss": 1.7023, + "step": 956 + }, + { + "epoch": 0.42666072224699064, + "grad_norm": 0.2168920785188675, + "learning_rate": 5.9474979990916094e-05, + "loss": 1.7188, + "step": 957 + }, + { + "epoch": 0.4271065537226928, + "grad_norm": 0.21075597405433655, + "learning_rate": 5.941846268005318e-05, + "loss": 1.8113, + "step": 958 + }, + { + "epoch": 0.427552385198395, + "grad_norm": 0.21342115104198456, + "learning_rate": 5.936189460366381e-05, + "loss": 1.7255, + "step": 959 + }, + { + "epoch": 0.4279982166740972, + "grad_norm": 0.21643872559070587, + "learning_rate": 5.930527590963343e-05, + "loss": 1.7867, + "step": 960 + }, + { + "epoch": 0.42844404814979936, + "grad_norm": 0.20463012158870697, + "learning_rate": 5.924860674597985e-05, + "loss": 1.6338, + "step": 961 + }, + { + "epoch": 0.42888987962550157, + "grad_norm": 0.19709286093711853, + "learning_rate": 5.919188726085278e-05, + "loss": 1.6746, + "step": 962 + }, + { + "epoch": 0.4293357111012037, + "grad_norm": 0.21841242909431458, + "learning_rate": 5.913511760253349e-05, + "loss": 1.7343, + "step": 963 + }, + { + "epoch": 0.42978154257690593, + "grad_norm": 0.2185276448726654, + "learning_rate": 5.907829791943445e-05, + "loss": 1.737, + "step": 964 + }, + { + "epoch": 0.43022737405260814, + "grad_norm": 0.22077877819538116, + "learning_rate": 5.902142836009888e-05, + "loss": 1.6816, + "step": 965 + }, + { + "epoch": 0.4306732055283103, + "grad_norm": 0.21090662479400635, + "learning_rate": 5.896450907320039e-05, + "loss": 1.7422, + "step": 966 + }, + { + "epoch": 0.4311190370040125, + "grad_norm": 0.2055198848247528, + "learning_rate": 5.8907540207542616e-05, + "loss": 1.7427, + "step": 967 + }, + { + "epoch": 0.43156486847971465, + "grad_norm": 0.22637182474136353, + "learning_rate": 5.8850521912058785e-05, + "loss": 1.7224, + "step": 968 + }, + { + "epoch": 0.43201069995541685, + "grad_norm": 0.20119556784629822, + "learning_rate": 5.8793454335811355e-05, + "loss": 1.7027, + "step": 969 + }, + { + "epoch": 0.43245653143111906, + "grad_norm": 0.20795485377311707, + "learning_rate": 
5.873633762799161e-05, + "loss": 1.641, + "step": 970 + }, + { + "epoch": 0.4329023629068212, + "grad_norm": 0.21166804432868958, + "learning_rate": 5.867917193791931e-05, + "loss": 1.7194, + "step": 971 + }, + { + "epoch": 0.4333481943825234, + "grad_norm": 0.20624929666519165, + "learning_rate": 5.862195741504222e-05, + "loss": 1.7359, + "step": 972 + }, + { + "epoch": 0.4337940258582256, + "grad_norm": 0.20094981789588928, + "learning_rate": 5.856469420893582e-05, + "loss": 1.6179, + "step": 973 + }, + { + "epoch": 0.4342398573339278, + "grad_norm": 0.21095781028270721, + "learning_rate": 5.850738246930281e-05, + "loss": 1.693, + "step": 974 + }, + { + "epoch": 0.43468568880962993, + "grad_norm": 0.20249047875404358, + "learning_rate": 5.8450022345972814e-05, + "loss": 1.6792, + "step": 975 + }, + { + "epoch": 0.43513152028533214, + "grad_norm": 0.2042103111743927, + "learning_rate": 5.839261398890193e-05, + "loss": 1.6591, + "step": 976 + }, + { + "epoch": 0.43557735176103435, + "grad_norm": 0.20385371148586273, + "learning_rate": 5.833515754817233e-05, + "loss": 1.7215, + "step": 977 + }, + { + "epoch": 0.4360231832367365, + "grad_norm": 0.20536215603351593, + "learning_rate": 5.827765317399194e-05, + "loss": 1.7272, + "step": 978 + }, + { + "epoch": 0.4364690147124387, + "grad_norm": 0.20327118039131165, + "learning_rate": 5.822010101669396e-05, + "loss": 1.7604, + "step": 979 + }, + { + "epoch": 0.43691484618814086, + "grad_norm": 0.20475706458091736, + "learning_rate": 5.816250122673652e-05, + "loss": 1.7537, + "step": 980 + }, + { + "epoch": 0.43736067766384307, + "grad_norm": 0.2161991149187088, + "learning_rate": 5.810485395470226e-05, + "loss": 1.7594, + "step": 981 + }, + { + "epoch": 0.4378065091395453, + "grad_norm": 0.22308722138404846, + "learning_rate": 5.804715935129798e-05, + "loss": 1.7299, + "step": 982 + }, + { + "epoch": 0.4382523406152474, + "grad_norm": 0.20231178402900696, + "learning_rate": 5.7989417567354205e-05, + "loss": 1.6728, + "step": 983 + }, + { + "epoch": 0.43869817209094963, + "grad_norm": 0.202326238155365, + "learning_rate": 5.7931628753824794e-05, + "loss": 1.6145, + "step": 984 + }, + { + "epoch": 0.4391440035666518, + "grad_norm": 0.20635566115379333, + "learning_rate": 5.7873793061786583e-05, + "loss": 1.7231, + "step": 985 + }, + { + "epoch": 0.439589835042354, + "grad_norm": 0.2267482429742813, + "learning_rate": 5.781591064243893e-05, + "loss": 1.7359, + "step": 986 + }, + { + "epoch": 0.4400356665180562, + "grad_norm": 0.20149658620357513, + "learning_rate": 5.7757981647103376e-05, + "loss": 1.6807, + "step": 987 + }, + { + "epoch": 0.44048149799375835, + "grad_norm": 0.20177322626113892, + "learning_rate": 5.7700006227223184e-05, + "loss": 1.594, + "step": 988 + }, + { + "epoch": 0.44092732946946056, + "grad_norm": 0.2092740684747696, + "learning_rate": 5.764198453436305e-05, + "loss": 1.6726, + "step": 989 + }, + { + "epoch": 0.4413731609451627, + "grad_norm": 0.2026442438364029, + "learning_rate": 5.75839167202086e-05, + "loss": 1.7513, + "step": 990 + }, + { + "epoch": 0.4418189924208649, + "grad_norm": 0.21950103342533112, + "learning_rate": 5.752580293656602e-05, + "loss": 1.7699, + "step": 991 + }, + { + "epoch": 0.44226482389656707, + "grad_norm": 0.2102147489786148, + "learning_rate": 5.7467643335361726e-05, + "loss": 1.6979, + "step": 992 + }, + { + "epoch": 0.4427106553722693, + "grad_norm": 0.19826726615428925, + "learning_rate": 5.7409438068641846e-05, + "loss": 1.6443, + "step": 993 + }, + { + "epoch": 0.4431564868479715, + 
"grad_norm": 0.21155422925949097, + "learning_rate": 5.735118728857195e-05, + "loss": 1.7602, + "step": 994 + }, + { + "epoch": 0.44360231832367364, + "grad_norm": 0.21589472889900208, + "learning_rate": 5.7292891147436564e-05, + "loss": 1.7669, + "step": 995 + }, + { + "epoch": 0.44404814979937585, + "grad_norm": 0.20023344457149506, + "learning_rate": 5.723454979763882e-05, + "loss": 1.7034, + "step": 996 + }, + { + "epoch": 0.444493981275078, + "grad_norm": 0.20109406113624573, + "learning_rate": 5.7176163391700014e-05, + "loss": 1.6577, + "step": 997 + }, + { + "epoch": 0.4449398127507802, + "grad_norm": 0.22041814029216766, + "learning_rate": 5.711773208225925e-05, + "loss": 1.7057, + "step": 998 + }, + { + "epoch": 0.4453856442264824, + "grad_norm": 0.2066877782344818, + "learning_rate": 5.7059256022073005e-05, + "loss": 1.6899, + "step": 999 + }, + { + "epoch": 0.44583147570218457, + "grad_norm": 0.21687161922454834, + "learning_rate": 5.700073536401478e-05, + "loss": 1.7737, + "step": 1000 + }, + { + "epoch": 0.4462773071778868, + "grad_norm": 0.2104395478963852, + "learning_rate": 5.694217026107463e-05, + "loss": 1.7763, + "step": 1001 + }, + { + "epoch": 0.4467231386535889, + "grad_norm": 0.20415447652339935, + "learning_rate": 5.688356086635883e-05, + "loss": 1.7233, + "step": 1002 + }, + { + "epoch": 0.44716897012929113, + "grad_norm": 0.2126995176076889, + "learning_rate": 5.6824907333089436e-05, + "loss": 1.7473, + "step": 1003 + }, + { + "epoch": 0.44761480160499334, + "grad_norm": 0.2019248604774475, + "learning_rate": 5.676620981460389e-05, + "loss": 1.7139, + "step": 1004 + }, + { + "epoch": 0.4480606330806955, + "grad_norm": 0.20638827979564667, + "learning_rate": 5.670746846435462e-05, + "loss": 1.6889, + "step": 1005 + }, + { + "epoch": 0.4485064645563977, + "grad_norm": 0.2177630215883255, + "learning_rate": 5.6648683435908666e-05, + "loss": 1.7213, + "step": 1006 + }, + { + "epoch": 0.44895229603209985, + "grad_norm": 0.21029476821422577, + "learning_rate": 5.658985488294725e-05, + "loss": 1.6411, + "step": 1007 + }, + { + "epoch": 0.44939812750780206, + "grad_norm": 0.20168250799179077, + "learning_rate": 5.6530982959265364e-05, + "loss": 1.6888, + "step": 1008 + }, + { + "epoch": 0.4498439589835042, + "grad_norm": 0.21509665250778198, + "learning_rate": 5.647206781877138e-05, + "loss": 1.716, + "step": 1009 + }, + { + "epoch": 0.4502897904592064, + "grad_norm": 0.20430870354175568, + "learning_rate": 5.641310961548667e-05, + "loss": 1.7226, + "step": 1010 + }, + { + "epoch": 0.4507356219349086, + "grad_norm": 0.20695053040981293, + "learning_rate": 5.6354108503545175e-05, + "loss": 1.7164, + "step": 1011 + }, + { + "epoch": 0.4511814534106108, + "grad_norm": 0.21358413994312286, + "learning_rate": 5.629506463719303e-05, + "loss": 1.7774, + "step": 1012 + }, + { + "epoch": 0.451627284886313, + "grad_norm": 0.19115769863128662, + "learning_rate": 5.623597817078811e-05, + "loss": 1.5483, + "step": 1013 + }, + { + "epoch": 0.45207311636201514, + "grad_norm": 0.21836210787296295, + "learning_rate": 5.6176849258799677e-05, + "loss": 1.7012, + "step": 1014 + }, + { + "epoch": 0.45251894783771734, + "grad_norm": 0.21280039846897125, + "learning_rate": 5.611767805580796e-05, + "loss": 1.6831, + "step": 1015 + }, + { + "epoch": 0.45296477931341955, + "grad_norm": 0.2034977525472641, + "learning_rate": 5.605846471650374e-05, + "loss": 1.6508, + "step": 1016 + }, + { + "epoch": 0.4534106107891217, + "grad_norm": 0.21081766486167908, + "learning_rate": 5.5999209395687974e-05, + 
"loss": 1.7487, + "step": 1017 + }, + { + "epoch": 0.4538564422648239, + "grad_norm": 0.21929119527339935, + "learning_rate": 5.593991224827134e-05, + "loss": 1.6747, + "step": 1018 + }, + { + "epoch": 0.45430227374052606, + "grad_norm": 0.2167121320962906, + "learning_rate": 5.58805734292739e-05, + "loss": 1.7495, + "step": 1019 + }, + { + "epoch": 0.45474810521622827, + "grad_norm": 0.20334959030151367, + "learning_rate": 5.582119309382463e-05, + "loss": 1.7429, + "step": 1020 + }, + { + "epoch": 0.4551939366919304, + "grad_norm": 0.23131625354290009, + "learning_rate": 5.5761771397161036e-05, + "loss": 1.7499, + "step": 1021 + }, + { + "epoch": 0.45563976816763263, + "grad_norm": 0.22897028923034668, + "learning_rate": 5.5702308494628785e-05, + "loss": 1.768, + "step": 1022 + }, + { + "epoch": 0.45608559964333484, + "grad_norm": 0.2257610410451889, + "learning_rate": 5.564280454168125e-05, + "loss": 1.7668, + "step": 1023 + }, + { + "epoch": 0.456531431119037, + "grad_norm": 0.21824105083942413, + "learning_rate": 5.5583259693879126e-05, + "loss": 1.7319, + "step": 1024 + }, + { + "epoch": 0.4569772625947392, + "grad_norm": 0.21084171533584595, + "learning_rate": 5.552367410688999e-05, + "loss": 1.7295, + "step": 1025 + }, + { + "epoch": 0.45742309407044135, + "grad_norm": 0.21336546540260315, + "learning_rate": 5.5464047936487986e-05, + "loss": 1.7079, + "step": 1026 + }, + { + "epoch": 0.45786892554614356, + "grad_norm": 0.21249566972255707, + "learning_rate": 5.540438133855329e-05, + "loss": 1.621, + "step": 1027 + }, + { + "epoch": 0.45831475702184576, + "grad_norm": 0.21947209537029266, + "learning_rate": 5.5344674469071824e-05, + "loss": 1.753, + "step": 1028 + }, + { + "epoch": 0.4587605884975479, + "grad_norm": 0.2173677682876587, + "learning_rate": 5.5284927484134736e-05, + "loss": 1.8318, + "step": 1029 + }, + { + "epoch": 0.4592064199732501, + "grad_norm": 0.21153312921524048, + "learning_rate": 5.52251405399381e-05, + "loss": 1.7093, + "step": 1030 + }, + { + "epoch": 0.4596522514489523, + "grad_norm": 0.2167084962129593, + "learning_rate": 5.51653137927824e-05, + "loss": 1.6376, + "step": 1031 + }, + { + "epoch": 0.4600980829246545, + "grad_norm": 0.21005761623382568, + "learning_rate": 5.5105447399072225e-05, + "loss": 1.6995, + "step": 1032 + }, + { + "epoch": 0.4605439144003567, + "grad_norm": 0.20497234165668488, + "learning_rate": 5.504554151531578e-05, + "loss": 1.6765, + "step": 1033 + }, + { + "epoch": 0.46098974587605884, + "grad_norm": 0.21576657891273499, + "learning_rate": 5.4985596298124555e-05, + "loss": 1.7386, + "step": 1034 + }, + { + "epoch": 0.46143557735176105, + "grad_norm": 0.21078845858573914, + "learning_rate": 5.492561190421281e-05, + "loss": 1.7177, + "step": 1035 + }, + { + "epoch": 0.4618814088274632, + "grad_norm": 0.20729662477970123, + "learning_rate": 5.4865588490397246e-05, + "loss": 1.7104, + "step": 1036 + }, + { + "epoch": 0.4623272403031654, + "grad_norm": 0.20453870296478271, + "learning_rate": 5.480552621359659e-05, + "loss": 1.6448, + "step": 1037 + }, + { + "epoch": 0.46277307177886756, + "grad_norm": 0.20621220767498016, + "learning_rate": 5.4745425230831136e-05, + "loss": 1.6781, + "step": 1038 + }, + { + "epoch": 0.46321890325456977, + "grad_norm": 0.2012334018945694, + "learning_rate": 5.468528569922241e-05, + "loss": 1.6901, + "step": 1039 + }, + { + "epoch": 0.463664734730272, + "grad_norm": 0.21340420842170715, + "learning_rate": 5.462510777599268e-05, + "loss": 1.7648, + "step": 1040 + }, + { + "epoch": 0.46411056620597413, + 
"grad_norm": 0.21295180916786194, + "learning_rate": 5.4564891618464595e-05, + "loss": 1.6664, + "step": 1041 + }, + { + "epoch": 0.46455639768167634, + "grad_norm": 0.20959512889385223, + "learning_rate": 5.450463738406077e-05, + "loss": 1.717, + "step": 1042 + }, + { + "epoch": 0.4650022291573785, + "grad_norm": 0.21654817461967468, + "learning_rate": 5.4444345230303325e-05, + "loss": 1.6852, + "step": 1043 + }, + { + "epoch": 0.4654480606330807, + "grad_norm": 0.2170281559228897, + "learning_rate": 5.438401531481357e-05, + "loss": 1.7399, + "step": 1044 + }, + { + "epoch": 0.4658938921087829, + "grad_norm": 0.21242597699165344, + "learning_rate": 5.4323647795311474e-05, + "loss": 1.7651, + "step": 1045 + }, + { + "epoch": 0.46633972358448506, + "grad_norm": 0.2083970457315445, + "learning_rate": 5.4263242829615376e-05, + "loss": 1.675, + "step": 1046 + }, + { + "epoch": 0.46678555506018726, + "grad_norm": 0.218455970287323, + "learning_rate": 5.420280057564146e-05, + "loss": 1.7243, + "step": 1047 + }, + { + "epoch": 0.4672313865358894, + "grad_norm": 0.2167058140039444, + "learning_rate": 5.414232119140341e-05, + "loss": 1.7501, + "step": 1048 + }, + { + "epoch": 0.4676772180115916, + "grad_norm": 0.22700956463813782, + "learning_rate": 5.408180483501197e-05, + "loss": 1.744, + "step": 1049 + }, + { + "epoch": 0.46812304948729383, + "grad_norm": 0.21506710350513458, + "learning_rate": 5.402125166467458e-05, + "loss": 1.7214, + "step": 1050 + }, + { + "epoch": 0.468568880962996, + "grad_norm": 0.23372529447078705, + "learning_rate": 5.396066183869486e-05, + "loss": 1.7585, + "step": 1051 + }, + { + "epoch": 0.4690147124386982, + "grad_norm": 0.21193209290504456, + "learning_rate": 5.390003551547231e-05, + "loss": 1.7052, + "step": 1052 + }, + { + "epoch": 0.46946054391440034, + "grad_norm": 0.20639494061470032, + "learning_rate": 5.383937285350184e-05, + "loss": 1.7743, + "step": 1053 + }, + { + "epoch": 0.46990637539010255, + "grad_norm": 0.21708635985851288, + "learning_rate": 5.377867401137332e-05, + "loss": 1.553, + "step": 1054 + }, + { + "epoch": 0.4703522068658047, + "grad_norm": 0.21908405423164368, + "learning_rate": 5.371793914777124e-05, + "loss": 1.6, + "step": 1055 + }, + { + "epoch": 0.4707980383415069, + "grad_norm": 0.21232865750789642, + "learning_rate": 5.365716842147425e-05, + "loss": 1.7168, + "step": 1056 + }, + { + "epoch": 0.4712438698172091, + "grad_norm": 0.21403208374977112, + "learning_rate": 5.359636199135476e-05, + "loss": 1.6639, + "step": 1057 + }, + { + "epoch": 0.47168970129291127, + "grad_norm": 0.22724024951457977, + "learning_rate": 5.353552001637852e-05, + "loss": 1.7488, + "step": 1058 + }, + { + "epoch": 0.4721355327686135, + "grad_norm": 0.20754577219486237, + "learning_rate": 5.347464265560418e-05, + "loss": 1.7318, + "step": 1059 + }, + { + "epoch": 0.4725813642443156, + "grad_norm": 0.20320498943328857, + "learning_rate": 5.3413730068182956e-05, + "loss": 1.7127, + "step": 1060 + }, + { + "epoch": 0.47302719572001783, + "grad_norm": 0.21433480083942413, + "learning_rate": 5.33527824133581e-05, + "loss": 1.7074, + "step": 1061 + }, + { + "epoch": 0.47347302719572004, + "grad_norm": 0.22244124114513397, + "learning_rate": 5.329179985046457e-05, + "loss": 1.7323, + "step": 1062 + }, + { + "epoch": 0.4739188586714222, + "grad_norm": 0.21724256873130798, + "learning_rate": 5.323078253892858e-05, + "loss": 1.7213, + "step": 1063 + }, + { + "epoch": 0.4743646901471244, + "grad_norm": 0.20553891360759735, + "learning_rate": 5.3169730638267165e-05, + 
"loss": 1.6503, + "step": 1064 + }, + { + "epoch": 0.47481052162282655, + "grad_norm": 0.21299302577972412, + "learning_rate": 5.3108644308087827e-05, + "loss": 1.7829, + "step": 1065 + }, + { + "epoch": 0.47525635309852876, + "grad_norm": 0.21917884051799774, + "learning_rate": 5.3047523708088034e-05, + "loss": 1.6767, + "step": 1066 + }, + { + "epoch": 0.4757021845742309, + "grad_norm": 0.20395790040493011, + "learning_rate": 5.298636899805487e-05, + "loss": 1.6184, + "step": 1067 + }, + { + "epoch": 0.4761480160499331, + "grad_norm": 0.20350614190101624, + "learning_rate": 5.2925180337864595e-05, + "loss": 1.7644, + "step": 1068 + }, + { + "epoch": 0.47659384752563533, + "grad_norm": 0.2148612141609192, + "learning_rate": 5.286395788748222e-05, + "loss": 1.7269, + "step": 1069 + }, + { + "epoch": 0.4770396790013375, + "grad_norm": 0.215896874666214, + "learning_rate": 5.280270180696109e-05, + "loss": 1.6776, + "step": 1070 + }, + { + "epoch": 0.4774855104770397, + "grad_norm": 0.20220790803432465, + "learning_rate": 5.274141225644245e-05, + "loss": 1.6781, + "step": 1071 + }, + { + "epoch": 0.47793134195274184, + "grad_norm": 0.21465882658958435, + "learning_rate": 5.2680089396155094e-05, + "loss": 1.7428, + "step": 1072 + }, + { + "epoch": 0.47837717342844405, + "grad_norm": 0.20903639495372772, + "learning_rate": 5.261873338641485e-05, + "loss": 1.6478, + "step": 1073 + }, + { + "epoch": 0.47882300490414625, + "grad_norm": 0.2066495567560196, + "learning_rate": 5.2557344387624224e-05, + "loss": 1.6382, + "step": 1074 + }, + { + "epoch": 0.4792688363798484, + "grad_norm": 0.19991101324558258, + "learning_rate": 5.2495922560271996e-05, + "loss": 1.7175, + "step": 1075 + }, + { + "epoch": 0.4797146678555506, + "grad_norm": 0.2088199108839035, + "learning_rate": 5.243446806493271e-05, + "loss": 1.6792, + "step": 1076 + }, + { + "epoch": 0.48016049933125277, + "grad_norm": 0.21749664843082428, + "learning_rate": 5.237298106226635e-05, + "loss": 1.7521, + "step": 1077 + }, + { + "epoch": 0.480606330806955, + "grad_norm": 0.2156524360179901, + "learning_rate": 5.23114617130179e-05, + "loss": 1.6828, + "step": 1078 + }, + { + "epoch": 0.4810521622826572, + "grad_norm": 0.20324012637138367, + "learning_rate": 5.224991017801687e-05, + "loss": 1.6673, + "step": 1079 + }, + { + "epoch": 0.48149799375835933, + "grad_norm": 0.20556019246578217, + "learning_rate": 5.218832661817692e-05, + "loss": 1.7317, + "step": 1080 + }, + { + "epoch": 0.48194382523406154, + "grad_norm": 0.212691530585289, + "learning_rate": 5.212671119449546e-05, + "loss": 1.6082, + "step": 1081 + }, + { + "epoch": 0.4823896567097637, + "grad_norm": 0.2099040448665619, + "learning_rate": 5.206506406805318e-05, + "loss": 1.7698, + "step": 1082 + }, + { + "epoch": 0.4828354881854659, + "grad_norm": 0.2126045823097229, + "learning_rate": 5.200338540001364e-05, + "loss": 1.6696, + "step": 1083 + }, + { + "epoch": 0.48328131966116805, + "grad_norm": 0.2191324681043625, + "learning_rate": 5.1941675351622874e-05, + "loss": 1.7187, + "step": 1084 + }, + { + "epoch": 0.48372715113687026, + "grad_norm": 0.20090503990650177, + "learning_rate": 5.187993408420897e-05, + "loss": 1.639, + "step": 1085 + }, + { + "epoch": 0.48417298261257247, + "grad_norm": 0.22327227890491486, + "learning_rate": 5.18181617591816e-05, + "loss": 1.6796, + "step": 1086 + }, + { + "epoch": 0.4846188140882746, + "grad_norm": 0.23542605340480804, + "learning_rate": 5.175635853803163e-05, + "loss": 1.7129, + "step": 1087 + }, + { + "epoch": 0.4850646455639768, + 
"grad_norm": 0.20205427706241608, + "learning_rate": 5.169452458233074e-05, + "loss": 1.6839, + "step": 1088 + }, + { + "epoch": 0.485510477039679, + "grad_norm": 0.23833878338336945, + "learning_rate": 5.163266005373089e-05, + "loss": 1.7784, + "step": 1089 + }, + { + "epoch": 0.4859563085153812, + "grad_norm": 0.2012386918067932, + "learning_rate": 5.157076511396405e-05, + "loss": 1.5859, + "step": 1090 + }, + { + "epoch": 0.4864021399910834, + "grad_norm": 0.20237721502780914, + "learning_rate": 5.150883992484162e-05, + "loss": 1.7051, + "step": 1091 + }, + { + "epoch": 0.48684797146678555, + "grad_norm": 0.20908315479755402, + "learning_rate": 5.144688464825413e-05, + "loss": 1.7357, + "step": 1092 + }, + { + "epoch": 0.48729380294248775, + "grad_norm": 0.2189488708972931, + "learning_rate": 5.138489944617071e-05, + "loss": 1.7457, + "step": 1093 + }, + { + "epoch": 0.4877396344181899, + "grad_norm": 0.19439110159873962, + "learning_rate": 5.132288448063879e-05, + "loss": 1.5064, + "step": 1094 + }, + { + "epoch": 0.4881854658938921, + "grad_norm": 0.19582004845142365, + "learning_rate": 5.126083991378359e-05, + "loss": 1.6382, + "step": 1095 + }, + { + "epoch": 0.4886312973695943, + "grad_norm": 0.21001210808753967, + "learning_rate": 5.11987659078077e-05, + "loss": 1.7204, + "step": 1096 + }, + { + "epoch": 0.48907712884529647, + "grad_norm": 0.22297264635562897, + "learning_rate": 5.113666262499067e-05, + "loss": 1.7857, + "step": 1097 + }, + { + "epoch": 0.4895229603209987, + "grad_norm": 0.20997293293476105, + "learning_rate": 5.1074530227688605e-05, + "loss": 1.7473, + "step": 1098 + }, + { + "epoch": 0.48996879179670083, + "grad_norm": 0.22429385781288147, + "learning_rate": 5.101236887833372e-05, + "loss": 1.7797, + "step": 1099 + }, + { + "epoch": 0.49041462327240304, + "grad_norm": 0.19816453754901886, + "learning_rate": 5.09501787394339e-05, + "loss": 1.6346, + "step": 1100 + }, + { + "epoch": 0.4908604547481052, + "grad_norm": 0.20720797777175903, + "learning_rate": 5.088795997357235e-05, + "loss": 1.6189, + "step": 1101 + }, + { + "epoch": 0.4913062862238074, + "grad_norm": 0.22230872511863708, + "learning_rate": 5.0825712743407034e-05, + "loss": 1.6586, + "step": 1102 + }, + { + "epoch": 0.4917521176995096, + "grad_norm": 0.2059677243232727, + "learning_rate": 5.07634372116704e-05, + "loss": 1.6611, + "step": 1103 + }, + { + "epoch": 0.49219794917521176, + "grad_norm": 0.21269652247428894, + "learning_rate": 5.070113354116885e-05, + "loss": 1.7192, + "step": 1104 + }, + { + "epoch": 0.49264378065091396, + "grad_norm": 0.21084697544574738, + "learning_rate": 5.0638801894782325e-05, + "loss": 1.6478, + "step": 1105 + }, + { + "epoch": 0.4930896121266161, + "grad_norm": 0.20062986016273499, + "learning_rate": 5.0576442435463976e-05, + "loss": 1.6797, + "step": 1106 + }, + { + "epoch": 0.4935354436023183, + "grad_norm": 0.20981177687644958, + "learning_rate": 5.0514055326239586e-05, + "loss": 1.661, + "step": 1107 + }, + { + "epoch": 0.49398127507802053, + "grad_norm": 0.20959115028381348, + "learning_rate": 5.045164073020729e-05, + "loss": 1.6883, + "step": 1108 + }, + { + "epoch": 0.4944271065537227, + "grad_norm": 0.21022839844226837, + "learning_rate": 5.038919881053704e-05, + "loss": 1.7269, + "step": 1109 + }, + { + "epoch": 0.4948729380294249, + "grad_norm": 0.1999373883008957, + "learning_rate": 5.0326729730470216e-05, + "loss": 1.6365, + "step": 1110 + }, + { + "epoch": 0.49531876950512704, + "grad_norm": 0.2134290188550949, + "learning_rate": 5.0264233653319225e-05, 
+ "loss": 1.7073, + "step": 1111 + }, + { + "epoch": 0.49576460098082925, + "grad_norm": 0.21585135161876678, + "learning_rate": 5.020171074246707e-05, + "loss": 1.6849, + "step": 1112 + }, + { + "epoch": 0.49621043245653146, + "grad_norm": 0.2069379687309265, + "learning_rate": 5.013916116136684e-05, + "loss": 1.7254, + "step": 1113 + }, + { + "epoch": 0.4966562639322336, + "grad_norm": 0.2296307384967804, + "learning_rate": 5.0076585073541406e-05, + "loss": 1.6823, + "step": 1114 + }, + { + "epoch": 0.4971020954079358, + "grad_norm": 0.2026946246623993, + "learning_rate": 5.001398264258292e-05, + "loss": 1.7022, + "step": 1115 + }, + { + "epoch": 0.49754792688363797, + "grad_norm": 0.21262523531913757, + "learning_rate": 4.9951354032152386e-05, + "loss": 1.7314, + "step": 1116 + }, + { + "epoch": 0.4979937583593402, + "grad_norm": 0.20525331795215607, + "learning_rate": 4.988869940597929e-05, + "loss": 1.6811, + "step": 1117 + }, + { + "epoch": 0.49843958983504233, + "grad_norm": 0.21029238402843475, + "learning_rate": 4.9826018927861076e-05, + "loss": 1.753, + "step": 1118 + }, + { + "epoch": 0.49888542131074454, + "grad_norm": 0.2149396538734436, + "learning_rate": 4.9763312761662825e-05, + "loss": 1.6747, + "step": 1119 + }, + { + "epoch": 0.49933125278644674, + "grad_norm": 0.21204285323619843, + "learning_rate": 4.9700581071316716e-05, + "loss": 1.6627, + "step": 1120 + }, + { + "epoch": 0.4997770842621489, + "grad_norm": 0.21056927740573883, + "learning_rate": 4.963782402082171e-05, + "loss": 1.7502, + "step": 1121 + }, + { + "epoch": 0.500222915737851, + "grad_norm": 0.20707182586193085, + "learning_rate": 4.957504177424304e-05, + "loss": 1.677, + "step": 1122 + }, + { + "epoch": 0.5006687472135533, + "grad_norm": 0.21892797946929932, + "learning_rate": 4.951223449571182e-05, + "loss": 1.7035, + "step": 1123 + }, + { + "epoch": 0.5011145786892555, + "grad_norm": 0.20630088448524475, + "learning_rate": 4.94494023494246e-05, + "loss": 1.6476, + "step": 1124 + }, + { + "epoch": 0.5015604101649577, + "grad_norm": 0.20754548907279968, + "learning_rate": 4.938654549964294e-05, + "loss": 1.6779, + "step": 1125 + }, + { + "epoch": 0.5020062416406599, + "grad_norm": 0.22580070793628693, + "learning_rate": 4.932366411069296e-05, + "loss": 1.6127, + "step": 1126 + }, + { + "epoch": 0.502452073116362, + "grad_norm": 0.20777414739131927, + "learning_rate": 4.926075834696498e-05, + "loss": 1.6834, + "step": 1127 + }, + { + "epoch": 0.5028979045920642, + "grad_norm": 0.23007522523403168, + "learning_rate": 4.919782837291301e-05, + "loss": 1.7658, + "step": 1128 + }, + { + "epoch": 0.5033437360677664, + "grad_norm": 0.2209630161523819, + "learning_rate": 4.9134874353054376e-05, + "loss": 1.5629, + "step": 1129 + }, + { + "epoch": 0.5037895675434686, + "grad_norm": 0.20336566865444183, + "learning_rate": 4.907189645196923e-05, + "loss": 1.7168, + "step": 1130 + }, + { + "epoch": 0.5042353990191708, + "grad_norm": 0.21301651000976562, + "learning_rate": 4.9008894834300187e-05, + "loss": 1.6621, + "step": 1131 + }, + { + "epoch": 0.5046812304948729, + "grad_norm": 0.24041074514389038, + "learning_rate": 4.894586966475186e-05, + "loss": 1.6621, + "step": 1132 + }, + { + "epoch": 0.5051270619705751, + "grad_norm": 0.20694510638713837, + "learning_rate": 4.8882821108090406e-05, + "loss": 1.7199, + "step": 1133 + }, + { + "epoch": 0.5055728934462773, + "grad_norm": 0.2116130143404007, + "learning_rate": 4.881974932914318e-05, + "loss": 1.6807, + "step": 1134 + }, + { + "epoch": 0.5060187249219795, + 
"grad_norm": 0.2578369379043579, + "learning_rate": 4.8756654492798186e-05, + "loss": 1.7366, + "step": 1135 + }, + { + "epoch": 0.5064645563976817, + "grad_norm": 0.19718419015407562, + "learning_rate": 4.8693536764003744e-05, + "loss": 1.6122, + "step": 1136 + }, + { + "epoch": 0.5069103878733838, + "grad_norm": 0.2349787950515747, + "learning_rate": 4.8630396307768015e-05, + "loss": 1.6636, + "step": 1137 + }, + { + "epoch": 0.507356219349086, + "grad_norm": 0.23219063878059387, + "learning_rate": 4.856723328915856e-05, + "loss": 1.7218, + "step": 1138 + }, + { + "epoch": 0.5078020508247882, + "grad_norm": 0.20803090929985046, + "learning_rate": 4.850404787330194e-05, + "loss": 1.7176, + "step": 1139 + }, + { + "epoch": 0.5082478823004905, + "grad_norm": 0.21287134289741516, + "learning_rate": 4.844084022538328e-05, + "loss": 1.6772, + "step": 1140 + }, + { + "epoch": 0.5086937137761925, + "grad_norm": 0.2394491583108902, + "learning_rate": 4.837761051064579e-05, + "loss": 1.7254, + "step": 1141 + }, + { + "epoch": 0.5091395452518948, + "grad_norm": 0.20777811110019684, + "learning_rate": 4.8314358894390395e-05, + "loss": 1.7165, + "step": 1142 + }, + { + "epoch": 0.509585376727597, + "grad_norm": 0.22741763293743134, + "learning_rate": 4.825108554197527e-05, + "loss": 1.7673, + "step": 1143 + }, + { + "epoch": 0.5100312082032992, + "grad_norm": 0.2043302208185196, + "learning_rate": 4.818779061881544e-05, + "loss": 1.6019, + "step": 1144 + }, + { + "epoch": 0.5104770396790014, + "grad_norm": 0.20986133813858032, + "learning_rate": 4.812447429038227e-05, + "loss": 1.7345, + "step": 1145 + }, + { + "epoch": 0.5109228711547035, + "grad_norm": 0.21103161573410034, + "learning_rate": 4.8061136722203145e-05, + "loss": 1.7266, + "step": 1146 + }, + { + "epoch": 0.5113687026304057, + "grad_norm": 0.20424163341522217, + "learning_rate": 4.799777807986091e-05, + "loss": 1.7143, + "step": 1147 + }, + { + "epoch": 0.5118145341061079, + "grad_norm": 0.2085757553577423, + "learning_rate": 4.793439852899357e-05, + "loss": 1.6392, + "step": 1148 + }, + { + "epoch": 0.5122603655818101, + "grad_norm": 0.21091756224632263, + "learning_rate": 4.787099823529374e-05, + "loss": 1.6563, + "step": 1149 + }, + { + "epoch": 0.5127061970575123, + "grad_norm": 0.20666278898715973, + "learning_rate": 4.78075773645083e-05, + "loss": 1.6265, + "step": 1150 + }, + { + "epoch": 0.5131520285332144, + "grad_norm": 0.20398664474487305, + "learning_rate": 4.77441360824379e-05, + "loss": 1.6852, + "step": 1151 + }, + { + "epoch": 0.5135978600089166, + "grad_norm": 0.21237637102603912, + "learning_rate": 4.768067455493656e-05, + "loss": 1.7676, + "step": 1152 + }, + { + "epoch": 0.5140436914846188, + "grad_norm": 0.20643079280853271, + "learning_rate": 4.761719294791123e-05, + "loss": 1.6755, + "step": 1153 + }, + { + "epoch": 0.514489522960321, + "grad_norm": 0.2095383107662201, + "learning_rate": 4.755369142732134e-05, + "loss": 1.7512, + "step": 1154 + }, + { + "epoch": 0.5149353544360232, + "grad_norm": 0.20500126481056213, + "learning_rate": 4.74901701591784e-05, + "loss": 1.7201, + "step": 1155 + }, + { + "epoch": 0.5153811859117253, + "grad_norm": 0.20776116847991943, + "learning_rate": 4.7426629309545524e-05, + "loss": 1.6956, + "step": 1156 + }, + { + "epoch": 0.5158270173874275, + "grad_norm": 0.20921987295150757, + "learning_rate": 4.736306904453705e-05, + "loss": 1.7125, + "step": 1157 + }, + { + "epoch": 0.5162728488631297, + "grad_norm": 0.20519307255744934, + "learning_rate": 4.7299489530318045e-05, + "loss": 
1.6724, + "step": 1158 + }, + { + "epoch": 0.516718680338832, + "grad_norm": 0.21430650353431702, + "learning_rate": 4.723589093310391e-05, + "loss": 1.6469, + "step": 1159 + }, + { + "epoch": 0.5171645118145342, + "grad_norm": 0.21243280172348022, + "learning_rate": 4.7172273419159914e-05, + "loss": 1.7214, + "step": 1160 + }, + { + "epoch": 0.5176103432902363, + "grad_norm": 0.20934118330478668, + "learning_rate": 4.7108637154800814e-05, + "loss": 1.7079, + "step": 1161 + }, + { + "epoch": 0.5180561747659385, + "grad_norm": 0.22941792011260986, + "learning_rate": 4.704498230639036e-05, + "loss": 1.7564, + "step": 1162 + }, + { + "epoch": 0.5185020062416407, + "grad_norm": 0.21575021743774414, + "learning_rate": 4.698130904034092e-05, + "loss": 1.6879, + "step": 1163 + }, + { + "epoch": 0.5189478377173429, + "grad_norm": 0.20983125269412994, + "learning_rate": 4.6917617523112965e-05, + "loss": 1.6282, + "step": 1164 + }, + { + "epoch": 0.5193936691930451, + "grad_norm": 0.2068534642457962, + "learning_rate": 4.685390792121471e-05, + "loss": 1.6888, + "step": 1165 + }, + { + "epoch": 0.5198395006687472, + "grad_norm": 0.216607004404068, + "learning_rate": 4.679018040120163e-05, + "loss": 1.6749, + "step": 1166 + }, + { + "epoch": 0.5202853321444494, + "grad_norm": 0.21307985484600067, + "learning_rate": 4.672643512967606e-05, + "loss": 1.7165, + "step": 1167 + }, + { + "epoch": 0.5207311636201516, + "grad_norm": 0.2192450910806656, + "learning_rate": 4.666267227328674e-05, + "loss": 1.6348, + "step": 1168 + }, + { + "epoch": 0.5211769950958538, + "grad_norm": 0.2020072340965271, + "learning_rate": 4.659889199872838e-05, + "loss": 1.6711, + "step": 1169 + }, + { + "epoch": 0.5216228265715559, + "grad_norm": 0.21062718331813812, + "learning_rate": 4.653509447274121e-05, + "loss": 1.6351, + "step": 1170 + }, + { + "epoch": 0.5220686580472581, + "grad_norm": 0.21252241730690002, + "learning_rate": 4.64712798621106e-05, + "loss": 1.6811, + "step": 1171 + }, + { + "epoch": 0.5225144895229603, + "grad_norm": 0.20972108840942383, + "learning_rate": 4.640744833366653e-05, + "loss": 1.7092, + "step": 1172 + }, + { + "epoch": 0.5229603209986625, + "grad_norm": 0.20589883625507355, + "learning_rate": 4.634360005428326e-05, + "loss": 1.706, + "step": 1173 + }, + { + "epoch": 0.5234061524743647, + "grad_norm": 0.20493586361408234, + "learning_rate": 4.627973519087881e-05, + "loss": 1.7027, + "step": 1174 + }, + { + "epoch": 0.5238519839500668, + "grad_norm": 0.20914669334888458, + "learning_rate": 4.621585391041456e-05, + "loss": 1.7355, + "step": 1175 + }, + { + "epoch": 0.524297815425769, + "grad_norm": 0.2061576098203659, + "learning_rate": 4.615195637989482e-05, + "loss": 1.6094, + "step": 1176 + }, + { + "epoch": 0.5247436469014712, + "grad_norm": 0.2159758359193802, + "learning_rate": 4.608804276636638e-05, + "loss": 1.7434, + "step": 1177 + }, + { + "epoch": 0.5251894783771734, + "grad_norm": 0.22390104830265045, + "learning_rate": 4.602411323691805e-05, + "loss": 1.6114, + "step": 1178 + }, + { + "epoch": 0.5256353098528757, + "grad_norm": 0.20476752519607544, + "learning_rate": 4.59601679586803e-05, + "loss": 1.6924, + "step": 1179 + }, + { + "epoch": 0.5260811413285778, + "grad_norm": 0.20510250329971313, + "learning_rate": 4.5896207098824725e-05, + "loss": 1.6327, + "step": 1180 + }, + { + "epoch": 0.52652697280428, + "grad_norm": 0.20736494660377502, + "learning_rate": 4.583223082456368e-05, + "loss": 1.6333, + "step": 1181 + }, + { + "epoch": 0.5269728042799822, + "grad_norm": 
0.221104234457016, + "learning_rate": 4.576823930314979e-05, + "loss": 1.7448, + "step": 1182 + }, + { + "epoch": 0.5274186357556844, + "grad_norm": 0.19380638003349304, + "learning_rate": 4.570423270187558e-05, + "loss": 1.582, + "step": 1183 + }, + { + "epoch": 0.5278644672313866, + "grad_norm": 0.2096526026725769, + "learning_rate": 4.564021118807298e-05, + "loss": 1.7032, + "step": 1184 + }, + { + "epoch": 0.5283102987070887, + "grad_norm": 0.21347478032112122, + "learning_rate": 4.55761749291129e-05, + "loss": 1.7078, + "step": 1185 + }, + { + "epoch": 0.5287561301827909, + "grad_norm": 0.2032448947429657, + "learning_rate": 4.5512124092404796e-05, + "loss": 1.7378, + "step": 1186 + }, + { + "epoch": 0.5292019616584931, + "grad_norm": 0.20943041145801544, + "learning_rate": 4.544805884539625e-05, + "loss": 1.7326, + "step": 1187 + }, + { + "epoch": 0.5296477931341953, + "grad_norm": 0.22112028300762177, + "learning_rate": 4.53839793555725e-05, + "loss": 1.7812, + "step": 1188 + }, + { + "epoch": 0.5300936246098975, + "grad_norm": 0.2149471938610077, + "learning_rate": 4.531988579045603e-05, + "loss": 1.7725, + "step": 1189 + }, + { + "epoch": 0.5305394560855996, + "grad_norm": 0.21456539630889893, + "learning_rate": 4.525577831760611e-05, + "loss": 1.7565, + "step": 1190 + }, + { + "epoch": 0.5309852875613018, + "grad_norm": 0.20755459368228912, + "learning_rate": 4.51916571046184e-05, + "loss": 1.7229, + "step": 1191 + }, + { + "epoch": 0.531431119037004, + "grad_norm": 0.21794572472572327, + "learning_rate": 4.512752231912443e-05, + "loss": 1.7428, + "step": 1192 + }, + { + "epoch": 0.5318769505127062, + "grad_norm": 0.2065848410129547, + "learning_rate": 4.506337412879125e-05, + "loss": 1.6261, + "step": 1193 + }, + { + "epoch": 0.5323227819884084, + "grad_norm": 0.2145584225654602, + "learning_rate": 4.4999212701320946e-05, + "loss": 1.7274, + "step": 1194 + }, + { + "epoch": 0.5327686134641105, + "grad_norm": 0.20807136595249176, + "learning_rate": 4.493503820445021e-05, + "loss": 1.6822, + "step": 1195 + }, + { + "epoch": 0.5332144449398127, + "grad_norm": 0.21459133923053741, + "learning_rate": 4.4870850805949885e-05, + "loss": 1.7168, + "step": 1196 + }, + { + "epoch": 0.533660276415515, + "grad_norm": 0.22650396823883057, + "learning_rate": 4.480665067362456e-05, + "loss": 1.6969, + "step": 1197 + }, + { + "epoch": 0.5341061078912172, + "grad_norm": 0.20575028657913208, + "learning_rate": 4.474243797531213e-05, + "loss": 1.7829, + "step": 1198 + }, + { + "epoch": 0.5345519393669194, + "grad_norm": 0.2113235592842102, + "learning_rate": 4.467821287888331e-05, + "loss": 1.6125, + "step": 1199 + }, + { + "epoch": 0.5349977708426215, + "grad_norm": 0.2215479612350464, + "learning_rate": 4.461397555224122e-05, + "loss": 1.706, + "step": 1200 + }, + { + "epoch": 0.5354436023183237, + "grad_norm": 0.21728859841823578, + "learning_rate": 4.454972616332099e-05, + "loss": 1.6154, + "step": 1201 + }, + { + "epoch": 0.5358894337940259, + "grad_norm": 0.20990782976150513, + "learning_rate": 4.448546488008927e-05, + "loss": 1.7413, + "step": 1202 + }, + { + "epoch": 0.5363352652697281, + "grad_norm": 0.21578721702098846, + "learning_rate": 4.4421191870543786e-05, + "loss": 1.68, + "step": 1203 + }, + { + "epoch": 0.5367810967454302, + "grad_norm": 0.20698952674865723, + "learning_rate": 4.4356907302712954e-05, + "loss": 1.6057, + "step": 1204 + }, + { + "epoch": 0.5372269282211324, + "grad_norm": 0.2218383550643921, + "learning_rate": 4.429261134465537e-05, + "loss": 1.7111, + "step": 1205 
+ }, + { + "epoch": 0.5376727596968346, + "grad_norm": 0.21707823872566223, + "learning_rate": 4.422830416445944e-05, + "loss": 1.6202, + "step": 1206 + }, + { + "epoch": 0.5381185911725368, + "grad_norm": 0.20811119675636292, + "learning_rate": 4.4163985930242894e-05, + "loss": 1.6383, + "step": 1207 + }, + { + "epoch": 0.538564422648239, + "grad_norm": 0.21612778306007385, + "learning_rate": 4.4099656810152354e-05, + "loss": 1.7539, + "step": 1208 + }, + { + "epoch": 0.5390102541239411, + "grad_norm": 0.212476447224617, + "learning_rate": 4.4035316972362895e-05, + "loss": 1.6803, + "step": 1209 + }, + { + "epoch": 0.5394560855996433, + "grad_norm": 0.21586640179157257, + "learning_rate": 4.397096658507764e-05, + "loss": 1.7336, + "step": 1210 + }, + { + "epoch": 0.5399019170753455, + "grad_norm": 0.21065600216388702, + "learning_rate": 4.390660581652726e-05, + "loss": 1.6984, + "step": 1211 + }, + { + "epoch": 0.5403477485510477, + "grad_norm": 0.2109772115945816, + "learning_rate": 4.3842234834969604e-05, + "loss": 1.7086, + "step": 1212 + }, + { + "epoch": 0.5407935800267499, + "grad_norm": 0.21016134321689606, + "learning_rate": 4.3777853808689187e-05, + "loss": 1.7631, + "step": 1213 + }, + { + "epoch": 0.541239411502452, + "grad_norm": 0.20888204872608185, + "learning_rate": 4.371346290599678e-05, + "loss": 1.7335, + "step": 1214 + }, + { + "epoch": 0.5416852429781542, + "grad_norm": 0.20656396448612213, + "learning_rate": 4.3649062295228995e-05, + "loss": 1.6545, + "step": 1215 + }, + { + "epoch": 0.5421310744538564, + "grad_norm": 0.2054409235715866, + "learning_rate": 4.358465214474781e-05, + "loss": 1.6127, + "step": 1216 + }, + { + "epoch": 0.5425769059295587, + "grad_norm": 0.20464791357517242, + "learning_rate": 4.352023262294016e-05, + "loss": 1.6299, + "step": 1217 + }, + { + "epoch": 0.5430227374052609, + "grad_norm": 0.20844127237796783, + "learning_rate": 4.345580389821749e-05, + "loss": 1.6403, + "step": 1218 + }, + { + "epoch": 0.543468568880963, + "grad_norm": 0.21710409224033356, + "learning_rate": 4.339136613901524e-05, + "loss": 1.7565, + "step": 1219 + }, + { + "epoch": 0.5439144003566652, + "grad_norm": 0.2088676393032074, + "learning_rate": 4.3326919513792526e-05, + "loss": 1.6293, + "step": 1220 + }, + { + "epoch": 0.5443602318323674, + "grad_norm": 0.22343219816684723, + "learning_rate": 4.3262464191031635e-05, + "loss": 1.7625, + "step": 1221 + }, + { + "epoch": 0.5448060633080696, + "grad_norm": 0.21561478078365326, + "learning_rate": 4.319800033923757e-05, + "loss": 1.6505, + "step": 1222 + }, + { + "epoch": 0.5452518947837718, + "grad_norm": 0.20941907167434692, + "learning_rate": 4.313352812693767e-05, + "loss": 1.7469, + "step": 1223 + }, + { + "epoch": 0.5456977262594739, + "grad_norm": 0.21117591857910156, + "learning_rate": 4.306904772268109e-05, + "loss": 1.7139, + "step": 1224 + }, + { + "epoch": 0.5461435577351761, + "grad_norm": 0.20840439200401306, + "learning_rate": 4.3004559295038414e-05, + "loss": 1.6465, + "step": 1225 + }, + { + "epoch": 0.5465893892108783, + "grad_norm": 0.22241465747356415, + "learning_rate": 4.294006301260122e-05, + "loss": 1.7182, + "step": 1226 + }, + { + "epoch": 0.5470352206865805, + "grad_norm": 0.2113228589296341, + "learning_rate": 4.2875559043981584e-05, + "loss": 1.659, + "step": 1227 + }, + { + "epoch": 0.5474810521622827, + "grad_norm": 0.2043575644493103, + "learning_rate": 4.281104755781172e-05, + "loss": 1.6871, + "step": 1228 + }, + { + "epoch": 0.5479268836379848, + "grad_norm": 0.21754762530326843, + 
"learning_rate": 4.274652872274347e-05, + "loss": 1.7026, + "step": 1229 + }, + { + "epoch": 0.548372715113687, + "grad_norm": 0.20709997415542603, + "learning_rate": 4.268200270744788e-05, + "loss": 1.6666, + "step": 1230 + }, + { + "epoch": 0.5488185465893892, + "grad_norm": 0.21000979840755463, + "learning_rate": 4.261746968061478e-05, + "loss": 1.7116, + "step": 1231 + }, + { + "epoch": 0.5492643780650914, + "grad_norm": 0.21727611124515533, + "learning_rate": 4.2552929810952335e-05, + "loss": 1.7343, + "step": 1232 + }, + { + "epoch": 0.5497102095407935, + "grad_norm": 0.20880410075187683, + "learning_rate": 4.248838326718659e-05, + "loss": 1.6643, + "step": 1233 + }, + { + "epoch": 0.5501560410164957, + "grad_norm": 0.20666611194610596, + "learning_rate": 4.2423830218061044e-05, + "loss": 1.6874, + "step": 1234 + }, + { + "epoch": 0.5506018724921979, + "grad_norm": 0.20641161501407623, + "learning_rate": 4.23592708323362e-05, + "loss": 1.7163, + "step": 1235 + }, + { + "epoch": 0.5510477039679001, + "grad_norm": 0.21506550908088684, + "learning_rate": 4.229470527878912e-05, + "loss": 1.7524, + "step": 1236 + }, + { + "epoch": 0.5514935354436024, + "grad_norm": 0.20672358572483063, + "learning_rate": 4.2230133726213006e-05, + "loss": 1.7025, + "step": 1237 + }, + { + "epoch": 0.5519393669193045, + "grad_norm": 0.21491588652133942, + "learning_rate": 4.216555634341671e-05, + "loss": 1.7526, + "step": 1238 + }, + { + "epoch": 0.5523851983950067, + "grad_norm": 0.2145368456840515, + "learning_rate": 4.2100973299224365e-05, + "loss": 1.7464, + "step": 1239 + }, + { + "epoch": 0.5528310298707089, + "grad_norm": 0.2077668011188507, + "learning_rate": 4.2036384762474914e-05, + "loss": 1.5391, + "step": 1240 + }, + { + "epoch": 0.5532768613464111, + "grad_norm": 0.2238229662179947, + "learning_rate": 4.197179090202161e-05, + "loss": 1.7474, + "step": 1241 + }, + { + "epoch": 0.5537226928221133, + "grad_norm": 0.20992541313171387, + "learning_rate": 4.190719188673164e-05, + "loss": 1.7062, + "step": 1242 + }, + { + "epoch": 0.5541685242978154, + "grad_norm": 0.2246631383895874, + "learning_rate": 4.18425878854857e-05, + "loss": 1.7767, + "step": 1243 + }, + { + "epoch": 0.5546143557735176, + "grad_norm": 0.2132015824317932, + "learning_rate": 4.177797906717747e-05, + "loss": 1.7522, + "step": 1244 + }, + { + "epoch": 0.5550601872492198, + "grad_norm": 0.21752022206783295, + "learning_rate": 4.171336560071327e-05, + "loss": 1.691, + "step": 1245 + }, + { + "epoch": 0.555506018724922, + "grad_norm": 0.2150384783744812, + "learning_rate": 4.1648747655011535e-05, + "loss": 1.7054, + "step": 1246 + }, + { + "epoch": 0.5559518502006242, + "grad_norm": 0.21931275725364685, + "learning_rate": 4.158412539900245e-05, + "loss": 1.7843, + "step": 1247 + }, + { + "epoch": 0.5563976816763263, + "grad_norm": 0.20713931322097778, + "learning_rate": 4.151949900162743e-05, + "loss": 1.6939, + "step": 1248 + }, + { + "epoch": 0.5568435131520285, + "grad_norm": 0.21415765583515167, + "learning_rate": 4.1454868631838726e-05, + "loss": 1.7499, + "step": 1249 + }, + { + "epoch": 0.5572893446277307, + "grad_norm": 0.20482096076011658, + "learning_rate": 4.139023445859899e-05, + "loss": 1.659, + "step": 1250 + }, + { + "epoch": 0.5577351761034329, + "grad_norm": 0.2121836245059967, + "learning_rate": 4.132559665088079e-05, + "loss": 1.6821, + "step": 1251 + }, + { + "epoch": 0.5581810075791351, + "grad_norm": 0.2097795456647873, + "learning_rate": 4.126095537766623e-05, + "loss": 1.7184, + "step": 1252 + }, + { + 
"epoch": 0.5586268390548372, + "grad_norm": 0.20807453989982605, + "learning_rate": 4.1196310807946454e-05, + "loss": 1.6055, + "step": 1253 + }, + { + "epoch": 0.5590726705305394, + "grad_norm": 0.20928820967674255, + "learning_rate": 4.113166311072123e-05, + "loss": 1.693, + "step": 1254 + }, + { + "epoch": 0.5595185020062416, + "grad_norm": 0.19924867153167725, + "learning_rate": 4.106701245499847e-05, + "loss": 1.6189, + "step": 1255 + }, + { + "epoch": 0.5599643334819439, + "grad_norm": 0.2069970667362213, + "learning_rate": 4.100235900979389e-05, + "loss": 1.734, + "step": 1256 + }, + { + "epoch": 0.5604101649576461, + "grad_norm": 0.20954173803329468, + "learning_rate": 4.0937702944130426e-05, + "loss": 1.6472, + "step": 1257 + }, + { + "epoch": 0.5608559964333482, + "grad_norm": 0.2086055427789688, + "learning_rate": 4.08730444270379e-05, + "loss": 1.6381, + "step": 1258 + }, + { + "epoch": 0.5613018279090504, + "grad_norm": 0.22531044483184814, + "learning_rate": 4.080838362755256e-05, + "loss": 1.7409, + "step": 1259 + }, + { + "epoch": 0.5617476593847526, + "grad_norm": 0.20108306407928467, + "learning_rate": 4.0743720714716584e-05, + "loss": 1.6589, + "step": 1260 + }, + { + "epoch": 0.5621934908604548, + "grad_norm": 0.22010278701782227, + "learning_rate": 4.0679055857577686e-05, + "loss": 1.6753, + "step": 1261 + }, + { + "epoch": 0.5626393223361569, + "grad_norm": 0.2158752679824829, + "learning_rate": 4.0614389225188675e-05, + "loss": 1.7061, + "step": 1262 + }, + { + "epoch": 0.5630851538118591, + "grad_norm": 0.21102535724639893, + "learning_rate": 4.0549720986607e-05, + "loss": 1.6903, + "step": 1263 + }, + { + "epoch": 0.5635309852875613, + "grad_norm": 0.205876886844635, + "learning_rate": 4.048505131089429e-05, + "loss": 1.7025, + "step": 1264 + }, + { + "epoch": 0.5639768167632635, + "grad_norm": 0.2214573621749878, + "learning_rate": 4.042038036711594e-05, + "loss": 1.7468, + "step": 1265 + }, + { + "epoch": 0.5644226482389657, + "grad_norm": 0.22327987849712372, + "learning_rate": 4.035570832434068e-05, + "loss": 1.7641, + "step": 1266 + }, + { + "epoch": 0.5648684797146678, + "grad_norm": 0.20828768610954285, + "learning_rate": 4.029103535164008e-05, + "loss": 1.6509, + "step": 1267 + }, + { + "epoch": 0.56531431119037, + "grad_norm": 0.2247881144285202, + "learning_rate": 4.0226361618088166e-05, + "loss": 1.8081, + "step": 1268 + }, + { + "epoch": 0.5657601426660722, + "grad_norm": 0.2249002456665039, + "learning_rate": 4.0161687292760956e-05, + "loss": 1.6715, + "step": 1269 + }, + { + "epoch": 0.5662059741417744, + "grad_norm": 0.2057991921901703, + "learning_rate": 4.009701254473599e-05, + "loss": 1.663, + "step": 1270 + }, + { + "epoch": 0.5666518056174766, + "grad_norm": 0.21340209245681763, + "learning_rate": 4.003233754309192e-05, + "loss": 1.7563, + "step": 1271 + }, + { + "epoch": 0.5670976370931787, + "grad_norm": 0.22589686512947083, + "learning_rate": 3.996766245690809e-05, + "loss": 1.6642, + "step": 1272 + }, + { + "epoch": 0.5675434685688809, + "grad_norm": 0.2142912894487381, + "learning_rate": 3.9902987455264026e-05, + "loss": 1.723, + "step": 1273 + }, + { + "epoch": 0.5679893000445831, + "grad_norm": 0.21880853176116943, + "learning_rate": 3.983831270723906e-05, + "loss": 1.796, + "step": 1274 + }, + { + "epoch": 0.5684351315202854, + "grad_norm": 0.2069232165813446, + "learning_rate": 3.9773638381911834e-05, + "loss": 1.7044, + "step": 1275 + }, + { + "epoch": 0.5688809629959876, + "grad_norm": 0.21509326994419098, + "learning_rate": 
3.970896464835994e-05, + "loss": 1.7285, + "step": 1276 + }, + { + "epoch": 0.5693267944716897, + "grad_norm": 0.21167059242725372, + "learning_rate": 3.9644291675659335e-05, + "loss": 1.6782, + "step": 1277 + }, + { + "epoch": 0.5697726259473919, + "grad_norm": 0.2061755210161209, + "learning_rate": 3.9579619632884073e-05, + "loss": 1.6552, + "step": 1278 + }, + { + "epoch": 0.5702184574230941, + "grad_norm": 0.20187799632549286, + "learning_rate": 3.951494868910572e-05, + "loss": 1.7067, + "step": 1279 + }, + { + "epoch": 0.5706642888987963, + "grad_norm": 0.21447299420833588, + "learning_rate": 3.945027901339302e-05, + "loss": 1.7124, + "step": 1280 + }, + { + "epoch": 0.5711101203744985, + "grad_norm": 0.20652039349079132, + "learning_rate": 3.938561077481133e-05, + "loss": 1.6828, + "step": 1281 + }, + { + "epoch": 0.5715559518502006, + "grad_norm": 0.2095683366060257, + "learning_rate": 3.9320944142422314e-05, + "loss": 1.6283, + "step": 1282 + }, + { + "epoch": 0.5720017833259028, + "grad_norm": 0.20461034774780273, + "learning_rate": 3.925627928528342e-05, + "loss": 1.6263, + "step": 1283 + }, + { + "epoch": 0.572447614801605, + "grad_norm": 0.22536614537239075, + "learning_rate": 3.919161637244744e-05, + "loss": 1.6778, + "step": 1284 + }, + { + "epoch": 0.5728934462773072, + "grad_norm": 0.20508262515068054, + "learning_rate": 3.9126955572962105e-05, + "loss": 1.6136, + "step": 1285 + }, + { + "epoch": 0.5733392777530094, + "grad_norm": 0.21751144528388977, + "learning_rate": 3.906229705586959e-05, + "loss": 1.6689, + "step": 1286 + }, + { + "epoch": 0.5737851092287115, + "grad_norm": 0.2109742909669876, + "learning_rate": 3.899764099020614e-05, + "loss": 1.6574, + "step": 1287 + }, + { + "epoch": 0.5742309407044137, + "grad_norm": 0.2066531479358673, + "learning_rate": 3.8932987545001536e-05, + "loss": 1.672, + "step": 1288 + }, + { + "epoch": 0.5746767721801159, + "grad_norm": 0.2212832123041153, + "learning_rate": 3.886833688927879e-05, + "loss": 1.7101, + "step": 1289 + }, + { + "epoch": 0.5751226036558181, + "grad_norm": 0.2111194133758545, + "learning_rate": 3.880368919205355e-05, + "loss": 1.6873, + "step": 1290 + }, + { + "epoch": 0.5755684351315203, + "grad_norm": 0.2206118106842041, + "learning_rate": 3.873904462233377e-05, + "loss": 1.8081, + "step": 1291 + }, + { + "epoch": 0.5760142666072224, + "grad_norm": 0.21819579601287842, + "learning_rate": 3.867440334911923e-05, + "loss": 1.7059, + "step": 1292 + }, + { + "epoch": 0.5764600980829246, + "grad_norm": 0.22220070660114288, + "learning_rate": 3.860976554140103e-05, + "loss": 1.7815, + "step": 1293 + }, + { + "epoch": 0.5769059295586269, + "grad_norm": 0.22105947136878967, + "learning_rate": 3.8545131368161294e-05, + "loss": 1.6533, + "step": 1294 + }, + { + "epoch": 0.5773517610343291, + "grad_norm": 0.21066629886627197, + "learning_rate": 3.848050099837258e-05, + "loss": 1.7196, + "step": 1295 + }, + { + "epoch": 0.5777975925100312, + "grad_norm": 0.2197866439819336, + "learning_rate": 3.841587460099755e-05, + "loss": 1.766, + "step": 1296 + }, + { + "epoch": 0.5782434239857334, + "grad_norm": 0.22813376784324646, + "learning_rate": 3.835125234498847e-05, + "loss": 1.7168, + "step": 1297 + }, + { + "epoch": 0.5786892554614356, + "grad_norm": 0.21724256873130798, + "learning_rate": 3.828663439928674e-05, + "loss": 1.6868, + "step": 1298 + }, + { + "epoch": 0.5791350869371378, + "grad_norm": 0.20569388568401337, + "learning_rate": 3.822202093282254e-05, + "loss": 1.6582, + "step": 1299 + }, + { + "epoch": 
0.57958091841284, + "grad_norm": 0.23136617243289948, + "learning_rate": 3.815741211451431e-05, + "loss": 1.6759, + "step": 1300 + }, + { + "epoch": 0.5800267498885421, + "grad_norm": 0.23664671182632446, + "learning_rate": 3.809280811326838e-05, + "loss": 1.5923, + "step": 1301 + }, + { + "epoch": 0.5804725813642443, + "grad_norm": 0.20310552418231964, + "learning_rate": 3.80282090979784e-05, + "loss": 1.6308, + "step": 1302 + }, + { + "epoch": 0.5809184128399465, + "grad_norm": 0.22173622250556946, + "learning_rate": 3.796361523752509e-05, + "loss": 1.7119, + "step": 1303 + }, + { + "epoch": 0.5813642443156487, + "grad_norm": 0.24687911570072174, + "learning_rate": 3.789902670077564e-05, + "loss": 1.7658, + "step": 1304 + }, + { + "epoch": 0.5818100757913509, + "grad_norm": 0.20782530307769775, + "learning_rate": 3.7834443656583304e-05, + "loss": 1.6894, + "step": 1305 + }, + { + "epoch": 0.582255907267053, + "grad_norm": 0.2082975208759308, + "learning_rate": 3.776986627378702e-05, + "loss": 1.7648, + "step": 1306 + }, + { + "epoch": 0.5827017387427552, + "grad_norm": 0.22985926270484924, + "learning_rate": 3.770529472121089e-05, + "loss": 1.6862, + "step": 1307 + }, + { + "epoch": 0.5831475702184574, + "grad_norm": 0.2407078593969345, + "learning_rate": 3.764072916766382e-05, + "loss": 1.7293, + "step": 1308 + }, + { + "epoch": 0.5835934016941596, + "grad_norm": 0.20803655683994293, + "learning_rate": 3.757616978193896e-05, + "loss": 1.7183, + "step": 1309 + }, + { + "epoch": 0.5840392331698618, + "grad_norm": 0.21299923956394196, + "learning_rate": 3.751161673281341e-05, + "loss": 1.766, + "step": 1310 + }, + { + "epoch": 0.5844850646455639, + "grad_norm": 0.22252234816551208, + "learning_rate": 3.744707018904767e-05, + "loss": 1.673, + "step": 1311 + }, + { + "epoch": 0.5849308961212661, + "grad_norm": 0.20621894299983978, + "learning_rate": 3.738253031938522e-05, + "loss": 1.6133, + "step": 1312 + }, + { + "epoch": 0.5853767275969683, + "grad_norm": 0.2171364426612854, + "learning_rate": 3.731799729255214e-05, + "loss": 1.7132, + "step": 1313 + }, + { + "epoch": 0.5858225590726706, + "grad_norm": 0.21667996048927307, + "learning_rate": 3.725347127725654e-05, + "loss": 1.7738, + "step": 1314 + }, + { + "epoch": 0.5862683905483728, + "grad_norm": 0.20949135720729828, + "learning_rate": 3.71889524421883e-05, + "loss": 1.7024, + "step": 1315 + }, + { + "epoch": 0.5867142220240749, + "grad_norm": 0.21288646757602692, + "learning_rate": 3.712444095601843e-05, + "loss": 1.7323, + "step": 1316 + }, + { + "epoch": 0.5871600534997771, + "grad_norm": 0.2060508131980896, + "learning_rate": 3.7059936987398794e-05, + "loss": 1.7078, + "step": 1317 + }, + { + "epoch": 0.5876058849754793, + "grad_norm": 0.20819522440433502, + "learning_rate": 3.69954407049616e-05, + "loss": 1.7087, + "step": 1318 + }, + { + "epoch": 0.5880517164511815, + "grad_norm": 0.220333993434906, + "learning_rate": 3.693095227731892e-05, + "loss": 1.7059, + "step": 1319 + }, + { + "epoch": 0.5884975479268837, + "grad_norm": 0.21376989781856537, + "learning_rate": 3.686647187306235e-05, + "loss": 1.7523, + "step": 1320 + }, + { + "epoch": 0.5889433794025858, + "grad_norm": 0.20429478585720062, + "learning_rate": 3.6801999660762435e-05, + "loss": 1.6969, + "step": 1321 + }, + { + "epoch": 0.589389210878288, + "grad_norm": 0.21376128494739532, + "learning_rate": 3.673753580896838e-05, + "loss": 1.72, + "step": 1322 + }, + { + "epoch": 0.5898350423539902, + "grad_norm": 0.20890051126480103, + "learning_rate": 
3.667308048620748e-05, + "loss": 1.6798, + "step": 1323 + }, + { + "epoch": 0.5902808738296924, + "grad_norm": 0.20608918368816376, + "learning_rate": 3.660863386098477e-05, + "loss": 1.73, + "step": 1324 + }, + { + "epoch": 0.5907267053053945, + "grad_norm": 0.204619899392128, + "learning_rate": 3.654419610178253e-05, + "loss": 1.686, + "step": 1325 + }, + { + "epoch": 0.5911725367810967, + "grad_norm": 0.211436465382576, + "learning_rate": 3.647976737705984e-05, + "loss": 1.6271, + "step": 1326 + }, + { + "epoch": 0.5916183682567989, + "grad_norm": 0.21040436625480652, + "learning_rate": 3.64153478552522e-05, + "loss": 1.6804, + "step": 1327 + }, + { + "epoch": 0.5920641997325011, + "grad_norm": 0.20360523462295532, + "learning_rate": 3.635093770477101e-05, + "loss": 1.6669, + "step": 1328 + }, + { + "epoch": 0.5925100312082033, + "grad_norm": 0.22287213802337646, + "learning_rate": 3.628653709400324e-05, + "loss": 1.655, + "step": 1329 + }, + { + "epoch": 0.5929558626839054, + "grad_norm": 0.20920884609222412, + "learning_rate": 3.6222146191310834e-05, + "loss": 1.689, + "step": 1330 + }, + { + "epoch": 0.5934016941596076, + "grad_norm": 0.20646342635154724, + "learning_rate": 3.61577651650304e-05, + "loss": 1.6546, + "step": 1331 + }, + { + "epoch": 0.5938475256353098, + "grad_norm": 0.20973603427410126, + "learning_rate": 3.609339418347275e-05, + "loss": 1.6167, + "step": 1332 + }, + { + "epoch": 0.594293357111012, + "grad_norm": 0.21416504681110382, + "learning_rate": 3.6029033414922376e-05, + "loss": 1.6796, + "step": 1333 + }, + { + "epoch": 0.5947391885867143, + "grad_norm": 0.2077873796224594, + "learning_rate": 3.596468302763712e-05, + "loss": 1.7042, + "step": 1334 + }, + { + "epoch": 0.5951850200624164, + "grad_norm": 0.209193617105484, + "learning_rate": 3.590034318984766e-05, + "loss": 1.6982, + "step": 1335 + }, + { + "epoch": 0.5956308515381186, + "grad_norm": 0.20596294105052948, + "learning_rate": 3.5836014069757126e-05, + "loss": 1.6244, + "step": 1336 + }, + { + "epoch": 0.5960766830138208, + "grad_norm": 0.21266692876815796, + "learning_rate": 3.577169583554057e-05, + "loss": 1.7512, + "step": 1337 + }, + { + "epoch": 0.596522514489523, + "grad_norm": 0.2089705914258957, + "learning_rate": 3.570738865534464e-05, + "loss": 1.7652, + "step": 1338 + }, + { + "epoch": 0.5969683459652252, + "grad_norm": 0.21891768276691437, + "learning_rate": 3.564309269728706e-05, + "loss": 1.7518, + "step": 1339 + }, + { + "epoch": 0.5974141774409273, + "grad_norm": 0.20328959822654724, + "learning_rate": 3.5578808129456214e-05, + "loss": 1.6233, + "step": 1340 + }, + { + "epoch": 0.5978600089166295, + "grad_norm": 0.20454058051109314, + "learning_rate": 3.5514535119910744e-05, + "loss": 1.6752, + "step": 1341 + }, + { + "epoch": 0.5983058403923317, + "grad_norm": 0.20984551310539246, + "learning_rate": 3.545027383667902e-05, + "loss": 1.695, + "step": 1342 + }, + { + "epoch": 0.5987516718680339, + "grad_norm": 0.2098245620727539, + "learning_rate": 3.5386024447758796e-05, + "loss": 1.7191, + "step": 1343 + }, + { + "epoch": 0.5991975033437361, + "grad_norm": 0.21946768462657928, + "learning_rate": 3.53217871211167e-05, + "loss": 1.7313, + "step": 1344 + }, + { + "epoch": 0.5996433348194382, + "grad_norm": 0.20916922390460968, + "learning_rate": 3.525756202468787e-05, + "loss": 1.7244, + "step": 1345 + }, + { + "epoch": 0.6000891662951404, + "grad_norm": 0.21778373420238495, + "learning_rate": 3.519334932637544e-05, + "loss": 1.7263, + "step": 1346 + }, + { + "epoch": 0.6005349977708426, 
+ "grad_norm": 0.20586247742176056, + "learning_rate": 3.512914919405012e-05, + "loss": 1.7088, + "step": 1347 + }, + { + "epoch": 0.6009808292465448, + "grad_norm": 0.21380962431430817, + "learning_rate": 3.5064961795549814e-05, + "loss": 1.6708, + "step": 1348 + }, + { + "epoch": 0.601426660722247, + "grad_norm": 0.2149226814508438, + "learning_rate": 3.500078729867906e-05, + "loss": 1.7002, + "step": 1349 + }, + { + "epoch": 0.6018724921979491, + "grad_norm": 0.21493519842624664, + "learning_rate": 3.493662587120876e-05, + "loss": 1.6753, + "step": 1350 + }, + { + "epoch": 0.6023183236736513, + "grad_norm": 0.22240254282951355, + "learning_rate": 3.4872477680875575e-05, + "loss": 1.7299, + "step": 1351 + }, + { + "epoch": 0.6027641551493536, + "grad_norm": 0.2129889726638794, + "learning_rate": 3.4808342895381607e-05, + "loss": 1.6543, + "step": 1352 + }, + { + "epoch": 0.6032099866250558, + "grad_norm": 0.21996819972991943, + "learning_rate": 3.4744221682393896e-05, + "loss": 1.6681, + "step": 1353 + }, + { + "epoch": 0.603655818100758, + "grad_norm": 0.20934395492076874, + "learning_rate": 3.4680114209543976e-05, + "loss": 1.6776, + "step": 1354 + }, + { + "epoch": 0.6041016495764601, + "grad_norm": 0.22622878849506378, + "learning_rate": 3.461602064442751e-05, + "loss": 1.6864, + "step": 1355 + }, + { + "epoch": 0.6045474810521623, + "grad_norm": 0.21692845225334167, + "learning_rate": 3.4551941154603756e-05, + "loss": 1.6491, + "step": 1356 + }, + { + "epoch": 0.6049933125278645, + "grad_norm": 0.21454817056655884, + "learning_rate": 3.448787590759522e-05, + "loss": 1.7218, + "step": 1357 + }, + { + "epoch": 0.6054391440035667, + "grad_norm": 0.20865899324417114, + "learning_rate": 3.442382507088711e-05, + "loss": 1.6616, + "step": 1358 + }, + { + "epoch": 0.6058849754792688, + "grad_norm": 0.21888892352581024, + "learning_rate": 3.435978881192702e-05, + "loss": 1.6815, + "step": 1359 + }, + { + "epoch": 0.606330806954971, + "grad_norm": 0.22078275680541992, + "learning_rate": 3.4295767298124434e-05, + "loss": 1.6127, + "step": 1360 + }, + { + "epoch": 0.6067766384306732, + "grad_norm": 0.212102010846138, + "learning_rate": 3.423176069685022e-05, + "loss": 1.6954, + "step": 1361 + }, + { + "epoch": 0.6072224699063754, + "grad_norm": 0.21632829308509827, + "learning_rate": 3.416776917543634e-05, + "loss": 1.6828, + "step": 1362 + }, + { + "epoch": 0.6076683013820776, + "grad_norm": 0.21511194109916687, + "learning_rate": 3.410379290117528e-05, + "loss": 1.7029, + "step": 1363 + }, + { + "epoch": 0.6081141328577797, + "grad_norm": 0.21631886065006256, + "learning_rate": 3.4039832041319724e-05, + "loss": 1.7318, + "step": 1364 + }, + { + "epoch": 0.6085599643334819, + "grad_norm": 0.2190600335597992, + "learning_rate": 3.397588676308196e-05, + "loss": 1.7437, + "step": 1365 + }, + { + "epoch": 0.6090057958091841, + "grad_norm": 0.21760675311088562, + "learning_rate": 3.3911957233633634e-05, + "loss": 1.7144, + "step": 1366 + }, + { + "epoch": 0.6094516272848863, + "grad_norm": 0.20560279488563538, + "learning_rate": 3.3848043620105193e-05, + "loss": 1.6706, + "step": 1367 + }, + { + "epoch": 0.6098974587605885, + "grad_norm": 0.21536052227020264, + "learning_rate": 3.378414608958544e-05, + "loss": 1.7173, + "step": 1368 + }, + { + "epoch": 0.6103432902362906, + "grad_norm": 0.21057669818401337, + "learning_rate": 3.372026480912121e-05, + "loss": 1.6717, + "step": 1369 + }, + { + "epoch": 0.6107891217119928, + "grad_norm": 0.21065440773963928, + "learning_rate": 3.3656399945716754e-05, 
+ "loss": 1.6259, + "step": 1370 + }, + { + "epoch": 0.611234953187695, + "grad_norm": 0.2036159187555313, + "learning_rate": 3.3592551666333484e-05, + "loss": 1.7031, + "step": 1371 + }, + { + "epoch": 0.6116807846633973, + "grad_norm": 0.21961021423339844, + "learning_rate": 3.352872013788941e-05, + "loss": 1.7019, + "step": 1372 + }, + { + "epoch": 0.6121266161390995, + "grad_norm": 0.20731209218502045, + "learning_rate": 3.346490552725879e-05, + "loss": 1.668, + "step": 1373 + }, + { + "epoch": 0.6125724476148016, + "grad_norm": 0.2076679915189743, + "learning_rate": 3.3401108001271643e-05, + "loss": 1.6821, + "step": 1374 + }, + { + "epoch": 0.6130182790905038, + "grad_norm": 0.20885632932186127, + "learning_rate": 3.333732772671327e-05, + "loss": 1.7199, + "step": 1375 + }, + { + "epoch": 0.613464110566206, + "grad_norm": 0.20752932131290436, + "learning_rate": 3.327356487032396e-05, + "loss": 1.56, + "step": 1376 + }, + { + "epoch": 0.6139099420419082, + "grad_norm": 0.20185458660125732, + "learning_rate": 3.320981959879839e-05, + "loss": 1.5693, + "step": 1377 + }, + { + "epoch": 0.6143557735176104, + "grad_norm": 0.21623708307743073, + "learning_rate": 3.314609207878531e-05, + "loss": 1.699, + "step": 1378 + }, + { + "epoch": 0.6148016049933125, + "grad_norm": 0.21628855168819427, + "learning_rate": 3.308238247688705e-05, + "loss": 1.6722, + "step": 1379 + }, + { + "epoch": 0.6152474364690147, + "grad_norm": 0.21141193807125092, + "learning_rate": 3.3018690959659085e-05, + "loss": 1.667, + "step": 1380 + }, + { + "epoch": 0.6156932679447169, + "grad_norm": 0.2191365510225296, + "learning_rate": 3.2955017693609644e-05, + "loss": 1.7972, + "step": 1381 + }, + { + "epoch": 0.6161390994204191, + "grad_norm": 0.20816870033740997, + "learning_rate": 3.289136284519919e-05, + "loss": 1.6487, + "step": 1382 + }, + { + "epoch": 0.6165849308961213, + "grad_norm": 0.21103070676326752, + "learning_rate": 3.28277265808401e-05, + "loss": 1.6749, + "step": 1383 + }, + { + "epoch": 0.6170307623718234, + "grad_norm": 0.21357609331607819, + "learning_rate": 3.2764109066896106e-05, + "loss": 1.6992, + "step": 1384 + }, + { + "epoch": 0.6174765938475256, + "grad_norm": 0.2102755904197693, + "learning_rate": 3.2700510469681955e-05, + "loss": 1.6287, + "step": 1385 + }, + { + "epoch": 0.6179224253232278, + "grad_norm": 0.2037838250398636, + "learning_rate": 3.263693095546296e-05, + "loss": 1.6018, + "step": 1386 + }, + { + "epoch": 0.61836825679893, + "grad_norm": 0.20810502767562866, + "learning_rate": 3.257337069045448e-05, + "loss": 1.6178, + "step": 1387 + }, + { + "epoch": 0.6188140882746321, + "grad_norm": 0.21527808904647827, + "learning_rate": 3.250982984082161e-05, + "loss": 1.7163, + "step": 1388 + }, + { + "epoch": 0.6192599197503343, + "grad_norm": 0.223877415060997, + "learning_rate": 3.244630857267867e-05, + "loss": 1.8249, + "step": 1389 + }, + { + "epoch": 0.6197057512260365, + "grad_norm": 0.20198240876197815, + "learning_rate": 3.238280705208879e-05, + "loss": 1.6193, + "step": 1390 + }, + { + "epoch": 0.6201515827017388, + "grad_norm": 0.2109755128622055, + "learning_rate": 3.231932544506345e-05, + "loss": 1.6658, + "step": 1391 + }, + { + "epoch": 0.620597414177441, + "grad_norm": 0.208167165517807, + "learning_rate": 3.2255863917562104e-05, + "loss": 1.7003, + "step": 1392 + }, + { + "epoch": 0.6210432456531431, + "grad_norm": 0.20778894424438477, + "learning_rate": 3.2192422635491714e-05, + "loss": 1.6847, + "step": 1393 + }, + { + "epoch": 0.6214890771288453, + "grad_norm": 
0.20059505105018616, + "learning_rate": 3.2129001764706264e-05, + "loss": 1.5389, + "step": 1394 + }, + { + "epoch": 0.6219349086045475, + "grad_norm": 0.2218339443206787, + "learning_rate": 3.206560147100644e-05, + "loss": 1.6874, + "step": 1395 + }, + { + "epoch": 0.6223807400802497, + "grad_norm": 0.20961079001426697, + "learning_rate": 3.2002221920139095e-05, + "loss": 1.7439, + "step": 1396 + }, + { + "epoch": 0.6228265715559519, + "grad_norm": 0.21106040477752686, + "learning_rate": 3.1938863277796875e-05, + "loss": 1.6987, + "step": 1397 + }, + { + "epoch": 0.623272403031654, + "grad_norm": 0.20886807143688202, + "learning_rate": 3.1875525709617733e-05, + "loss": 1.6589, + "step": 1398 + }, + { + "epoch": 0.6237182345073562, + "grad_norm": 0.2203982025384903, + "learning_rate": 3.1812209381184564e-05, + "loss": 1.7644, + "step": 1399 + }, + { + "epoch": 0.6241640659830584, + "grad_norm": 0.2029838114976883, + "learning_rate": 3.174891445802473e-05, + "loss": 1.6191, + "step": 1400 + }, + { + "epoch": 0.6246098974587606, + "grad_norm": 0.2111927568912506, + "learning_rate": 3.168564110560961e-05, + "loss": 1.6278, + "step": 1401 + }, + { + "epoch": 0.6250557289344628, + "grad_norm": 0.21242134273052216, + "learning_rate": 3.162238948935423e-05, + "loss": 1.6946, + "step": 1402 + }, + { + "epoch": 0.6255015604101649, + "grad_norm": 0.2124587893486023, + "learning_rate": 3.155915977461674e-05, + "loss": 1.6792, + "step": 1403 + }, + { + "epoch": 0.6259473918858671, + "grad_norm": 0.21112290024757385, + "learning_rate": 3.149595212669808e-05, + "loss": 1.6724, + "step": 1404 + }, + { + "epoch": 0.6263932233615693, + "grad_norm": 0.21580274403095245, + "learning_rate": 3.1432766710841456e-05, + "loss": 1.7356, + "step": 1405 + }, + { + "epoch": 0.6268390548372715, + "grad_norm": 0.20503874123096466, + "learning_rate": 3.1369603692232e-05, + "loss": 1.6348, + "step": 1406 + }, + { + "epoch": 0.6272848863129737, + "grad_norm": 0.2640460431575775, + "learning_rate": 3.130646323599626e-05, + "loss": 1.7351, + "step": 1407 + }, + { + "epoch": 0.6277307177886758, + "grad_norm": 0.20648761093616486, + "learning_rate": 3.124334550720182e-05, + "loss": 1.7541, + "step": 1408 + }, + { + "epoch": 0.628176549264378, + "grad_norm": 0.22613625228405, + "learning_rate": 3.1180250670856844e-05, + "loss": 1.7837, + "step": 1409 + }, + { + "epoch": 0.6286223807400803, + "grad_norm": 0.21720516681671143, + "learning_rate": 3.11171788919096e-05, + "loss": 1.698, + "step": 1410 + }, + { + "epoch": 0.6290682122157825, + "grad_norm": 0.20562918484210968, + "learning_rate": 3.1054130335248166e-05, + "loss": 1.6787, + "step": 1411 + }, + { + "epoch": 0.6295140436914847, + "grad_norm": 0.20738881826400757, + "learning_rate": 3.099110516569982e-05, + "loss": 1.6394, + "step": 1412 + }, + { + "epoch": 0.6299598751671868, + "grad_norm": 0.21101416647434235, + "learning_rate": 3.092810354803077e-05, + "loss": 1.6561, + "step": 1413 + }, + { + "epoch": 0.630405706642889, + "grad_norm": 0.22874417901039124, + "learning_rate": 3.0865125646945644e-05, + "loss": 1.792, + "step": 1414 + }, + { + "epoch": 0.6308515381185912, + "grad_norm": 0.21608386933803558, + "learning_rate": 3.080217162708699e-05, + "loss": 1.6984, + "step": 1415 + }, + { + "epoch": 0.6312973695942934, + "grad_norm": 0.2127838283777237, + "learning_rate": 3.0739241653035024e-05, + "loss": 1.7198, + "step": 1416 + }, + { + "epoch": 0.6317432010699956, + "grad_norm": 0.21969813108444214, + "learning_rate": 3.0676335889307046e-05, + "loss": 1.7305, + 
"step": 1417 + }, + { + "epoch": 0.6321890325456977, + "grad_norm": 0.22311647236347198, + "learning_rate": 3.061345450035709e-05, + "loss": 1.6646, + "step": 1418 + }, + { + "epoch": 0.6326348640213999, + "grad_norm": 0.21823038160800934, + "learning_rate": 3.055059765057541e-05, + "loss": 1.7181, + "step": 1419 + }, + { + "epoch": 0.6330806954971021, + "grad_norm": 0.20873479545116425, + "learning_rate": 3.048776550428818e-05, + "loss": 1.6317, + "step": 1420 + }, + { + "epoch": 0.6335265269728043, + "grad_norm": 0.21145819127559662, + "learning_rate": 3.0424958225756973e-05, + "loss": 1.7466, + "step": 1421 + }, + { + "epoch": 0.6339723584485064, + "grad_norm": 0.21070320904254913, + "learning_rate": 3.03621759791783e-05, + "loss": 1.6848, + "step": 1422 + }, + { + "epoch": 0.6344181899242086, + "grad_norm": 0.20529739558696747, + "learning_rate": 3.0299418928683297e-05, + "loss": 1.6566, + "step": 1423 + }, + { + "epoch": 0.6348640213999108, + "grad_norm": 0.22973240911960602, + "learning_rate": 3.023668723833719e-05, + "loss": 1.7464, + "step": 1424 + }, + { + "epoch": 0.635309852875613, + "grad_norm": 0.21204419434070587, + "learning_rate": 3.017398107213894e-05, + "loss": 1.5982, + "step": 1425 + }, + { + "epoch": 0.6357556843513152, + "grad_norm": 0.21231311559677124, + "learning_rate": 3.0111300594020724e-05, + "loss": 1.6837, + "step": 1426 + }, + { + "epoch": 0.6362015158270173, + "grad_norm": 0.21652869880199432, + "learning_rate": 3.0048645967847613e-05, + "loss": 1.6829, + "step": 1427 + }, + { + "epoch": 0.6366473473027195, + "grad_norm": 0.21283885836601257, + "learning_rate": 2.9986017357417096e-05, + "loss": 1.7203, + "step": 1428 + }, + { + "epoch": 0.6370931787784218, + "grad_norm": 0.22009681165218353, + "learning_rate": 2.99234149264586e-05, + "loss": 1.7014, + "step": 1429 + }, + { + "epoch": 0.637539010254124, + "grad_norm": 0.21199141442775726, + "learning_rate": 2.9860838838633177e-05, + "loss": 1.6197, + "step": 1430 + }, + { + "epoch": 0.6379848417298262, + "grad_norm": 0.21214723587036133, + "learning_rate": 2.9798289257532946e-05, + "loss": 1.6456, + "step": 1431 + }, + { + "epoch": 0.6384306732055283, + "grad_norm": 0.22607432305812836, + "learning_rate": 2.9735766346680778e-05, + "loss": 1.7629, + "step": 1432 + }, + { + "epoch": 0.6388765046812305, + "grad_norm": 0.21528464555740356, + "learning_rate": 2.9673270269529788e-05, + "loss": 1.6699, + "step": 1433 + }, + { + "epoch": 0.6393223361569327, + "grad_norm": 0.22296687960624695, + "learning_rate": 2.9610801189462965e-05, + "loss": 1.6775, + "step": 1434 + }, + { + "epoch": 0.6397681676326349, + "grad_norm": 0.20506487786769867, + "learning_rate": 2.954835926979272e-05, + "loss": 1.6319, + "step": 1435 + }, + { + "epoch": 0.6402139991083371, + "grad_norm": 0.21017904579639435, + "learning_rate": 2.948594467376041e-05, + "loss": 1.7399, + "step": 1436 + }, + { + "epoch": 0.6406598305840392, + "grad_norm": 0.21416164934635162, + "learning_rate": 2.9423557564536044e-05, + "loss": 1.5833, + "step": 1437 + }, + { + "epoch": 0.6411056620597414, + "grad_norm": 0.2149401605129242, + "learning_rate": 2.9361198105217688e-05, + "loss": 1.7031, + "step": 1438 + }, + { + "epoch": 0.6415514935354436, + "grad_norm": 0.21111612021923065, + "learning_rate": 2.9298866458831177e-05, + "loss": 1.7359, + "step": 1439 + }, + { + "epoch": 0.6419973250111458, + "grad_norm": 0.2218097597360611, + "learning_rate": 2.9236562788329607e-05, + "loss": 1.7301, + "step": 1440 + }, + { + "epoch": 0.642443156486848, + "grad_norm": 
0.22060351073741913, + "learning_rate": 2.9174287256592965e-05, + "loss": 1.7308, + "step": 1441 + }, + { + "epoch": 0.6428889879625501, + "grad_norm": 0.21140018105506897, + "learning_rate": 2.911204002642767e-05, + "loss": 1.7422, + "step": 1442 + }, + { + "epoch": 0.6433348194382523, + "grad_norm": 0.21067947149276733, + "learning_rate": 2.9049821260566103e-05, + "loss": 1.7614, + "step": 1443 + }, + { + "epoch": 0.6437806509139545, + "grad_norm": 0.21665067970752716, + "learning_rate": 2.8987631121666296e-05, + "loss": 1.7804, + "step": 1444 + }, + { + "epoch": 0.6442264823896567, + "grad_norm": 0.2272922247648239, + "learning_rate": 2.8925469772311405e-05, + "loss": 1.7391, + "step": 1445 + }, + { + "epoch": 0.644672313865359, + "grad_norm": 0.21021457016468048, + "learning_rate": 2.886333737500935e-05, + "loss": 1.652, + "step": 1446 + }, + { + "epoch": 0.645118145341061, + "grad_norm": 0.2098311483860016, + "learning_rate": 2.8801234092192314e-05, + "loss": 1.6935, + "step": 1447 + }, + { + "epoch": 0.6455639768167633, + "grad_norm": 0.21540747582912445, + "learning_rate": 2.873916008621641e-05, + "loss": 1.7426, + "step": 1448 + }, + { + "epoch": 0.6460098082924655, + "grad_norm": 0.21947064995765686, + "learning_rate": 2.8677115519361218e-05, + "loss": 1.6578, + "step": 1449 + }, + { + "epoch": 0.6464556397681677, + "grad_norm": 0.21141056716442108, + "learning_rate": 2.8615100553829304e-05, + "loss": 1.679, + "step": 1450 + }, + { + "epoch": 0.6469014712438698, + "grad_norm": 0.20692133903503418, + "learning_rate": 2.85531153517459e-05, + "loss": 1.6553, + "step": 1451 + }, + { + "epoch": 0.647347302719572, + "grad_norm": 0.20739763975143433, + "learning_rate": 2.849116007515839e-05, + "loss": 1.6575, + "step": 1452 + }, + { + "epoch": 0.6477931341952742, + "grad_norm": 0.20650619268417358, + "learning_rate": 2.8429234886035967e-05, + "loss": 1.5827, + "step": 1453 + }, + { + "epoch": 0.6482389656709764, + "grad_norm": 0.2126653790473938, + "learning_rate": 2.8367339946269114e-05, + "loss": 1.7219, + "step": 1454 + }, + { + "epoch": 0.6486847971466786, + "grad_norm": 0.20047074556350708, + "learning_rate": 2.8305475417669277e-05, + "loss": 1.5027, + "step": 1455 + }, + { + "epoch": 0.6491306286223807, + "grad_norm": 0.21770769357681274, + "learning_rate": 2.8243641461968382e-05, + "loss": 1.7409, + "step": 1456 + }, + { + "epoch": 0.6495764600980829, + "grad_norm": 0.21293190121650696, + "learning_rate": 2.8181838240818415e-05, + "loss": 1.6885, + "step": 1457 + }, + { + "epoch": 0.6500222915737851, + "grad_norm": 0.2154323160648346, + "learning_rate": 2.8120065915791053e-05, + "loss": 1.6696, + "step": 1458 + }, + { + "epoch": 0.6504681230494873, + "grad_norm": 0.2091473788022995, + "learning_rate": 2.8058324648377135e-05, + "loss": 1.6426, + "step": 1459 + }, + { + "epoch": 0.6509139545251895, + "grad_norm": 0.22156959772109985, + "learning_rate": 2.799661459998638e-05, + "loss": 1.6903, + "step": 1460 + }, + { + "epoch": 0.6513597860008916, + "grad_norm": 0.21561315655708313, + "learning_rate": 2.7934935931946836e-05, + "loss": 1.6615, + "step": 1461 + }, + { + "epoch": 0.6518056174765938, + "grad_norm": 0.2081414759159088, + "learning_rate": 2.7873288805504545e-05, + "loss": 1.6987, + "step": 1462 + }, + { + "epoch": 0.652251448952296, + "grad_norm": 0.21595971286296844, + "learning_rate": 2.781167338182309e-05, + "loss": 1.6551, + "step": 1463 + }, + { + "epoch": 0.6526972804279982, + "grad_norm": 0.20916205644607544, + "learning_rate": 2.7750089821983148e-05, + "loss": 
1.6845, + "step": 1464 + }, + { + "epoch": 0.6531431119037004, + "grad_norm": 0.21937905251979828, + "learning_rate": 2.768853828698212e-05, + "loss": 1.7105, + "step": 1465 + }, + { + "epoch": 0.6535889433794025, + "grad_norm": 0.20495536923408508, + "learning_rate": 2.7627018937733656e-05, + "loss": 1.677, + "step": 1466 + }, + { + "epoch": 0.6540347748551048, + "grad_norm": 0.20795701444149017, + "learning_rate": 2.756553193506731e-05, + "loss": 1.6625, + "step": 1467 + }, + { + "epoch": 0.654480606330807, + "grad_norm": 0.21184998750686646, + "learning_rate": 2.7504077439728017e-05, + "loss": 1.7021, + "step": 1468 + }, + { + "epoch": 0.6549264378065092, + "grad_norm": 0.20234046876430511, + "learning_rate": 2.7442655612375772e-05, + "loss": 1.6386, + "step": 1469 + }, + { + "epoch": 0.6553722692822114, + "grad_norm": 0.20407864451408386, + "learning_rate": 2.738126661358517e-05, + "loss": 1.6068, + "step": 1470 + }, + { + "epoch": 0.6558181007579135, + "grad_norm": 0.2117338925600052, + "learning_rate": 2.7319910603844916e-05, + "loss": 1.7043, + "step": 1471 + }, + { + "epoch": 0.6562639322336157, + "grad_norm": 0.22000300884246826, + "learning_rate": 2.7258587743557557e-05, + "loss": 1.7204, + "step": 1472 + }, + { + "epoch": 0.6567097637093179, + "grad_norm": 0.20668327808380127, + "learning_rate": 2.719729819303892e-05, + "loss": 1.7082, + "step": 1473 + }, + { + "epoch": 0.6571555951850201, + "grad_norm": 0.21397939324378967, + "learning_rate": 2.7136042112517794e-05, + "loss": 1.7018, + "step": 1474 + }, + { + "epoch": 0.6576014266607223, + "grad_norm": 0.218618705868721, + "learning_rate": 2.7074819662135408e-05, + "loss": 1.7575, + "step": 1475 + }, + { + "epoch": 0.6580472581364244, + "grad_norm": 0.2082207053899765, + "learning_rate": 2.7013631001945136e-05, + "loss": 1.7218, + "step": 1476 + }, + { + "epoch": 0.6584930896121266, + "grad_norm": 0.2075459063053131, + "learning_rate": 2.6952476291911983e-05, + "loss": 1.665, + "step": 1477 + }, + { + "epoch": 0.6589389210878288, + "grad_norm": 0.20892712473869324, + "learning_rate": 2.6891355691912183e-05, + "loss": 1.6645, + "step": 1478 + }, + { + "epoch": 0.659384752563531, + "grad_norm": 0.21131663024425507, + "learning_rate": 2.683026936173285e-05, + "loss": 1.7297, + "step": 1479 + }, + { + "epoch": 0.6598305840392331, + "grad_norm": 0.21774396300315857, + "learning_rate": 2.6769217461071435e-05, + "loss": 1.6501, + "step": 1480 + }, + { + "epoch": 0.6602764155149353, + "grad_norm": 0.20743833482265472, + "learning_rate": 2.6708200149535433e-05, + "loss": 1.7012, + "step": 1481 + }, + { + "epoch": 0.6607222469906375, + "grad_norm": 0.21155858039855957, + "learning_rate": 2.6647217586641915e-05, + "loss": 1.6919, + "step": 1482 + }, + { + "epoch": 0.6611680784663397, + "grad_norm": 0.21320167183876038, + "learning_rate": 2.658626993181705e-05, + "loss": 1.6932, + "step": 1483 + }, + { + "epoch": 0.661613909942042, + "grad_norm": 0.20776276290416718, + "learning_rate": 2.6525357344395825e-05, + "loss": 1.6292, + "step": 1484 + }, + { + "epoch": 0.662059741417744, + "grad_norm": 0.20588624477386475, + "learning_rate": 2.64644799836215e-05, + "loss": 1.5796, + "step": 1485 + }, + { + "epoch": 0.6625055728934462, + "grad_norm": 0.21364407241344452, + "learning_rate": 2.640363800864526e-05, + "loss": 1.7093, + "step": 1486 + }, + { + "epoch": 0.6629514043691485, + "grad_norm": 0.21058973670005798, + "learning_rate": 2.6342831578525766e-05, + "loss": 1.6617, + "step": 1487 + }, + { + "epoch": 0.6633972358448507, + "grad_norm": 
0.23640546202659607, + "learning_rate": 2.6282060852228767e-05, + "loss": 1.7007, + "step": 1488 + }, + { + "epoch": 0.6638430673205529, + "grad_norm": 0.21117572486400604, + "learning_rate": 2.6221325988626686e-05, + "loss": 1.7067, + "step": 1489 + }, + { + "epoch": 0.664288898796255, + "grad_norm": 0.22875715792179108, + "learning_rate": 2.6160627146498162e-05, + "loss": 1.6788, + "step": 1490 + }, + { + "epoch": 0.6647347302719572, + "grad_norm": 0.20593346655368805, + "learning_rate": 2.609996448452769e-05, + "loss": 1.7064, + "step": 1491 + }, + { + "epoch": 0.6651805617476594, + "grad_norm": 0.2149941325187683, + "learning_rate": 2.603933816130514e-05, + "loss": 1.6716, + "step": 1492 + }, + { + "epoch": 0.6656263932233616, + "grad_norm": 0.20980104804039001, + "learning_rate": 2.5978748335325447e-05, + "loss": 1.6276, + "step": 1493 + }, + { + "epoch": 0.6660722246990638, + "grad_norm": 0.2014513611793518, + "learning_rate": 2.591819516498804e-05, + "loss": 1.5863, + "step": 1494 + }, + { + "epoch": 0.6665180561747659, + "grad_norm": 0.2191655933856964, + "learning_rate": 2.5857678808596606e-05, + "loss": 1.6928, + "step": 1495 + }, + { + "epoch": 0.6669638876504681, + "grad_norm": 0.2146279513835907, + "learning_rate": 2.5797199424358558e-05, + "loss": 1.715, + "step": 1496 + }, + { + "epoch": 0.6674097191261703, + "grad_norm": 0.220846489071846, + "learning_rate": 2.573675717038463e-05, + "loss": 1.7091, + "step": 1497 + }, + { + "epoch": 0.6678555506018725, + "grad_norm": 0.21300770342350006, + "learning_rate": 2.5676352204688533e-05, + "loss": 1.6565, + "step": 1498 + }, + { + "epoch": 0.6683013820775747, + "grad_norm": 0.21232812106609344, + "learning_rate": 2.561598468518644e-05, + "loss": 1.6675, + "step": 1499 + }, + { + "epoch": 0.6687472135532768, + "grad_norm": 0.22252769768238068, + "learning_rate": 2.5555654769696682e-05, + "loss": 1.788, + "step": 1500 + } + ], + "logging_steps": 1, + "max_steps": 2243, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 300, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 1.131385184059392e+18, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}