{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.19745502413339183,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013163668275559457,
      "grad_norm": 1.65111437852943,
      "learning_rate": 2e-05,
      "loss": 1.7474,
      "step": 1
    },
    {
      "epoch": 0.0026327336551118913,
      "grad_norm": 1.7295359235133803,
      "learning_rate": 1.999777729859618e-05,
      "loss": 1.712,
      "step": 2
    },
    {
      "epoch": 0.003949100482667837,
      "grad_norm": 10.447342724724326,
      "learning_rate": 1.9991110182465032e-05,
      "loss": 2.0566,
      "step": 3
    },
    {
      "epoch": 0.005265467310223783,
      "grad_norm": 8.439236569082253,
      "learning_rate": 1.9980001615408228e-05,
      "loss": 2.0446,
      "step": 4
    },
    {
      "epoch": 0.006581834137779728,
      "grad_norm": 5.502178376287468,
      "learning_rate": 1.9964456535631287e-05,
      "loss": 1.9864,
      "step": 5
    },
    {
      "epoch": 0.007898200965335674,
      "grad_norm": 3.4244942197147945,
      "learning_rate": 1.9944481853548335e-05,
      "loss": 1.9393,
      "step": 6
    },
    {
      "epoch": 0.009214567792891619,
      "grad_norm": 2.2479839558307013,
      "learning_rate": 1.9920086448710162e-05,
      "loss": 1.8865,
      "step": 7
    },
    {
      "epoch": 0.010530934620447565,
      "grad_norm": 1.7351334611523515,
      "learning_rate": 1.9891281165856876e-05,
      "loss": 1.8423,
      "step": 8
    },
    {
      "epoch": 0.01184730144800351,
      "grad_norm": 1.4656029971843716,
      "learning_rate": 1.9858078810097004e-05,
      "loss": 1.8586,
      "step": 9
    },
    {
      "epoch": 0.013163668275559455,
      "grad_norm": 1.3773721079335524,
      "learning_rate": 1.98204941412151e-05,
      "loss": 1.8524,
      "step": 10
    },
    {
      "epoch": 0.014480035103115402,
      "grad_norm": 1.1578401907098703,
      "learning_rate": 1.9778543867110428e-05,
      "loss": 1.8525,
      "step": 11
    },
    {
      "epoch": 0.015796401930671347,
      "grad_norm": 1.0088566042734737,
      "learning_rate": 1.9732246636369605e-05,
      "loss": 1.8441,
      "step": 12
    },
    {
      "epoch": 0.017112768758227294,
      "grad_norm": 0.9601192905672813,
      "learning_rate": 1.968162302997659e-05,
      "loss": 1.8396,
      "step": 13
    },
    {
      "epoch": 0.018429135585783237,
      "grad_norm": 0.9248937703572266,
      "learning_rate": 1.962669555216358e-05,
      "loss": 1.8251,
      "step": 14
    },
    {
      "epoch": 0.019745502413339184,
      "grad_norm": 0.9073184114547326,
      "learning_rate": 1.9567488620406984e-05,
      "loss": 1.8546,
      "step": 15
    },
    {
      "epoch": 0.02106186924089513,
      "grad_norm": 0.7850875076115653,
      "learning_rate": 1.9504028554572865e-05,
      "loss": 1.8354,
      "step": 16
    },
    {
      "epoch": 0.022378236068451074,
      "grad_norm": 0.8835800418613018,
      "learning_rate": 1.943634356521671e-05,
      "loss": 1.8499,
      "step": 17
    },
    {
      "epoch": 0.02369460289600702,
      "grad_norm": 0.763280002887791,
      "learning_rate": 1.9364463741042694e-05,
      "loss": 1.8137,
      "step": 18
    },
    {
      "epoch": 0.025010969723562967,
      "grad_norm": 0.7586586554363934,
      "learning_rate": 1.928842103552803e-05,
      "loss": 1.8228,
      "step": 19
    },
    {
      "epoch": 0.02632733655111891,
      "grad_norm": 0.7075808706792394,
      "learning_rate": 1.920824925271838e-05,
      "loss": 1.8425,
      "step": 20
    },
    {
      "epoch": 0.027643703378674857,
      "grad_norm": 0.7601165553708226,
      "learning_rate": 1.9123984032200586e-05,
      "loss": 1.8197,
      "step": 21
    },
    {
      "epoch": 0.028960070206230804,
      "grad_norm": 0.7193254428686906,
      "learning_rate": 1.9035662833259433e-05,
      "loss": 1.8407,
      "step": 22
    },
    {
      "epoch": 0.030276437033786747,
      "grad_norm": 0.7215706441009537,
      "learning_rate": 1.8943324918225495e-05,
      "loss": 1.8395,
      "step": 23
    },
    {
      "epoch": 0.031592803861342694,
      "grad_norm": 0.8203870414871476,
      "learning_rate": 1.8847011335021447e-05,
      "loss": 1.8271,
      "step": 24
    },
    {
      "epoch": 0.03290917068889864,
      "grad_norm": 0.7431586249459948,
      "learning_rate": 1.874676489891461e-05,
      "loss": 1.8337,
      "step": 25
    },
    {
      "epoch": 0.03422553751645459,
      "grad_norm": 0.7737992048688279,
      "learning_rate": 1.8642630173483832e-05,
      "loss": 1.8356,
      "step": 26
    },
    {
      "epoch": 0.03554190434401053,
      "grad_norm": 0.7804596966065455,
      "learning_rate": 1.85346534508092e-05,
      "loss": 1.8412,
      "step": 27
    },
    {
      "epoch": 0.036858271171566474,
      "grad_norm": 0.6931515626982303,
      "learning_rate": 1.8422882730893323e-05,
      "loss": 1.8614,
      "step": 28
    },
    {
      "epoch": 0.03817463799912242,
      "grad_norm": 0.8084803042078094,
      "learning_rate": 1.8307367700323412e-05,
      "loss": 1.8593,
      "step": 29
    },
    {
      "epoch": 0.03949100482667837,
      "grad_norm": 0.626512906169853,
      "learning_rate": 1.8188159710183595e-05,
      "loss": 1.831,
      "step": 30
    },
    {
      "epoch": 0.040807371654234315,
      "grad_norm": 0.7813065255670536,
      "learning_rate": 1.8065311753227272e-05,
      "loss": 1.8762,
      "step": 31
    },
    {
      "epoch": 0.04212373848179026,
      "grad_norm": 0.6706823503135386,
      "learning_rate": 1.7938878440319722e-05,
      "loss": 1.8683,
      "step": 32
    },
    {
      "epoch": 0.0434401053093462,
      "grad_norm": 0.6364652672451495,
      "learning_rate": 1.7808915976161364e-05,
      "loss": 1.8733,
      "step": 33
    },
    {
      "epoch": 0.04475647213690215,
      "grad_norm": 0.7368025681738634,
      "learning_rate": 1.7675482134302503e-05,
      "loss": 1.8544,
      "step": 34
    },
    {
      "epoch": 0.046072838964458095,
      "grad_norm": 0.5548763381698079,
      "learning_rate": 1.753863623146066e-05,
      "loss": 1.8598,
      "step": 35
    },
    {
      "epoch": 0.04738920579201404,
      "grad_norm": 0.7780814291300651,
      "learning_rate": 1.7398439101151908e-05,
      "loss": 1.8469,
      "step": 36
    },
    {
      "epoch": 0.04870557261956999,
      "grad_norm": 0.6521757251300341,
      "learning_rate": 1.7254953066647915e-05,
      "loss": 1.8524,
      "step": 37
    },
    {
      "epoch": 0.050021939447125935,
      "grad_norm": 0.7722078567672159,
      "learning_rate": 1.710824191327075e-05,
      "loss": 1.8855,
      "step": 38
    },
    {
      "epoch": 0.051338306274681875,
      "grad_norm": 0.7298655195609485,
      "learning_rate": 1.695837086003772e-05,
      "loss": 1.8635,
      "step": 39
    },
    {
      "epoch": 0.05265467310223782,
      "grad_norm": 0.6468602911451591,
      "learning_rate": 1.680540653066891e-05,
      "loss": 1.863,
      "step": 40
    },
    {
      "epoch": 0.05397103992979377,
      "grad_norm": 0.7916429438105029,
      "learning_rate": 1.6649416923970248e-05,
      "loss": 1.8646,
      "step": 41
    },
    {
      "epoch": 0.055287406757349715,
      "grad_norm": 0.5474264565504703,
      "learning_rate": 1.649047138360529e-05,
      "loss": 1.8576,
      "step": 42
    },
    {
      "epoch": 0.05660377358490566,
      "grad_norm": 0.7518985692585881,
      "learning_rate": 1.632864056726917e-05,
      "loss": 1.8606,
      "step": 43
    },
    {
      "epoch": 0.05792014041246161,
      "grad_norm": 0.5585165468856192,
      "learning_rate": 1.6163996415278423e-05,
      "loss": 1.8872,
      "step": 44
    },
    {
      "epoch": 0.05923650724001755,
      "grad_norm": 0.7084850146213678,
      "learning_rate": 1.5996612118590604e-05,
      "loss": 1.8692,
      "step": 45
    },
    {
      "epoch": 0.060552874067573495,
      "grad_norm": 0.6998461172771332,
      "learning_rate": 1.5826562086267956e-05,
      "loss": 1.8885,
      "step": 46
    },
    {
      "epoch": 0.06186924089512944,
      "grad_norm": 0.7300033950295419,
      "learning_rate": 1.565392191239959e-05,
      "loss": 1.8772,
      "step": 47
    },
    {
      "epoch": 0.06318560772268539,
      "grad_norm": 0.7899786680966636,
      "learning_rate": 1.5478768342496872e-05,
      "loss": 1.8685,
      "step": 48
    },
    {
      "epoch": 0.06450197455024133,
      "grad_norm": 0.6561757387110828,
      "learning_rate": 1.5301179239376936e-05,
      "loss": 1.8842,
      "step": 49
    },
    {
      "epoch": 0.06581834137779728,
      "grad_norm": 0.7117935357444856,
      "learning_rate": 1.512123354854955e-05,
      "loss": 1.865,
      "step": 50
    },
    {
      "epoch": 0.06713470820535322,
      "grad_norm": 0.5646089364283718,
      "learning_rate": 1.4939011263122635e-05,
      "loss": 1.8651,
      "step": 51
    },
    {
      "epoch": 0.06845107503290918,
      "grad_norm": 0.6673985877657005,
      "learning_rate": 1.4754593388242117e-05,
      "loss": 1.8768,
      "step": 52
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 0.5092546824434766,
      "learning_rate": 1.4568061905081874e-05,
      "loss": 1.868,
      "step": 53
    },
    {
      "epoch": 0.07108380868802106,
      "grad_norm": 0.6140848791437851,
      "learning_rate": 1.4379499734399797e-05,
      "loss": 1.8817,
      "step": 54
    },
    {
      "epoch": 0.07240017551557701,
      "grad_norm": 0.5766820950125316,
      "learning_rate": 1.4188990699676186e-05,
      "loss": 1.8799,
      "step": 55
    },
    {
      "epoch": 0.07371654234313295,
      "grad_norm": 0.6127504628805722,
      "learning_rate": 1.3996619489850822e-05,
      "loss": 1.8936,
      "step": 56
    },
    {
      "epoch": 0.0750329091706889,
      "grad_norm": 0.6266059301264697,
      "learning_rate": 1.3802471621675337e-05,
      "loss": 1.8784,
      "step": 57
    },
    {
      "epoch": 0.07634927599824484,
      "grad_norm": 0.5534750017049672,
      "learning_rate": 1.3606633401697557e-05,
      "loss": 1.9011,
      "step": 58
    },
    {
      "epoch": 0.0776656428258008,
      "grad_norm": 0.6175892828039317,
      "learning_rate": 1.340919188789477e-05,
      "loss": 1.8837,
      "step": 59
    },
    {
      "epoch": 0.07898200965335674,
      "grad_norm": 0.5258068472184501,
      "learning_rate": 1.3210234850972966e-05,
      "loss": 1.9028,
      "step": 60
    },
    {
      "epoch": 0.08029837648091268,
      "grad_norm": 0.5803172257476928,
      "learning_rate": 1.300985073534919e-05,
      "loss": 1.9027,
      "step": 61
    },
    {
      "epoch": 0.08161474330846863,
      "grad_norm": 0.46576381858780536,
      "learning_rate": 1.280812861983446e-05,
      "loss": 1.8829,
      "step": 62
    },
    {
      "epoch": 0.08293111013602457,
      "grad_norm": 0.5243424882503339,
      "learning_rate": 1.2605158178034656e-05,
      "loss": 1.8897,
      "step": 63
    },
    {
      "epoch": 0.08424747696358052,
      "grad_norm": 0.44275509950840103,
      "learning_rate": 1.2401029638486952e-05,
      "loss": 1.8689,
      "step": 64
    },
    {
      "epoch": 0.08556384379113646,
      "grad_norm": 0.47692124746423276,
      "learning_rate": 1.219583374454963e-05,
      "loss": 1.9078,
      "step": 65
    },
    {
      "epoch": 0.0868802106186924,
      "grad_norm": 0.423870884057128,
      "learning_rate": 1.1989661714063e-05,
      "loss": 1.9246,
      "step": 66
    },
    {
      "epoch": 0.08819657744624836,
      "grad_norm": 0.4344116733448028,
      "learning_rate": 1.1782605198799371e-05,
      "loss": 1.9138,
      "step": 67
    },
    {
      "epoch": 0.0895129442738043,
      "grad_norm": 0.44365257109026207,
      "learning_rate": 1.157475624372018e-05,
      "loss": 1.8872,
      "step": 68
    },
    {
      "epoch": 0.09082931110136025,
      "grad_norm": 0.45705186620536725,
      "learning_rate": 1.1366207246058269e-05,
      "loss": 1.8984,
      "step": 69
    },
    {
      "epoch": 0.09214567792891619,
      "grad_norm": 0.40175081883984465,
      "learning_rate": 1.1157050914243614e-05,
      "loss": 1.9134,
      "step": 70
    },
    {
      "epoch": 0.09346204475647214,
      "grad_norm": 0.42726401125065505,
      "learning_rate": 1.0947380226690686e-05,
      "loss": 1.9097,
      "step": 71
    },
    {
      "epoch": 0.09477841158402808,
      "grad_norm": 0.3900674410637252,
      "learning_rate": 1.0737288390465792e-05,
      "loss": 1.9008,
      "step": 72
    },
    {
      "epoch": 0.09609477841158402,
      "grad_norm": 0.41225255645420644,
      "learning_rate": 1.0526868799852797e-05,
      "loss": 1.9243,
      "step": 73
    },
    {
      "epoch": 0.09741114523913998,
      "grad_norm": 0.3838092023163431,
      "learning_rate": 1.031621499483559e-05,
      "loss": 1.9182,
      "step": 74
    },
    {
      "epoch": 0.09872751206669592,
      "grad_norm": 0.3599445898903389,
      "learning_rate": 1.0105420619515798e-05,
      "loss": 1.9112,
      "step": 75
    },
    {
      "epoch": 0.10004387889425187,
      "grad_norm": 0.3909645250507488,
      "learning_rate": 9.894579380484206e-06,
      "loss": 1.9276,
      "step": 76
    },
    {
      "epoch": 0.10136024572180781,
      "grad_norm": 0.3640880651891862,
      "learning_rate": 9.683785005164412e-06,
      "loss": 1.9343,
      "step": 77
    },
    {
      "epoch": 0.10267661254936375,
      "grad_norm": 0.3520227157576893,
      "learning_rate": 9.473131200147205e-06,
      "loss": 1.9412,
      "step": 78
    },
    {
      "epoch": 0.1039929793769197,
      "grad_norm": 0.36960565249242594,
      "learning_rate": 9.262711609534211e-06,
      "loss": 1.9272,
      "step": 79
    },
    {
      "epoch": 0.10530934620447564,
      "grad_norm": 0.35697589279129394,
      "learning_rate": 9.052619773309318e-06,
      "loss": 1.9205,
      "step": 80
    },
    {
      "epoch": 0.1066257130320316,
      "grad_norm": 0.3539289322099185,
      "learning_rate": 8.842949085756389e-06,
      "loss": 1.9167,
      "step": 81
    },
    {
      "epoch": 0.10794207985958754,
      "grad_norm": 0.377830507323211,
      "learning_rate": 8.633792753941733e-06,
      "loss": 1.9283,
      "step": 82
    },
    {
      "epoch": 0.10925844668714349,
      "grad_norm": 0.32762948772243283,
      "learning_rate": 8.425243756279824e-06,
      "loss": 1.9078,
      "step": 83
    },
    {
      "epoch": 0.11057481351469943,
      "grad_norm": 0.3218390246795396,
      "learning_rate": 8.217394801200632e-06,
      "loss": 1.9303,
      "step": 84
    },
    {
      "epoch": 0.11189118034225537,
      "grad_norm": 0.36785562529934085,
      "learning_rate": 8.010338285937006e-06,
      "loss": 1.9196,
      "step": 85
    },
    {
      "epoch": 0.11320754716981132,
      "grad_norm": 0.33474377063086946,
      "learning_rate": 7.804166255450372e-06,
      "loss": 1.9252,
      "step": 86
    },
    {
      "epoch": 0.11452391399736726,
      "grad_norm": 0.3276135559258542,
      "learning_rate": 7.598970361513052e-06,
      "loss": 1.931,
      "step": 87
    },
    {
      "epoch": 0.11584028082492322,
      "grad_norm": 0.34019785956502163,
      "learning_rate": 7.394841821965345e-06,
      "loss": 1.9104,
      "step": 88
    },
    {
      "epoch": 0.11715664765247916,
      "grad_norm": 0.3038814019019217,
      "learning_rate": 7.191871380165538e-06,
      "loss": 1.936,
      "step": 89
    },
    {
      "epoch": 0.1184730144800351,
      "grad_norm": 0.2934690304673656,
      "learning_rate": 6.990149264650814e-06,
      "loss": 1.9285,
      "step": 90
    },
    {
      "epoch": 0.11978938130759105,
      "grad_norm": 0.3241397790607444,
      "learning_rate": 6.789765149027039e-06,
      "loss": 1.9357,
      "step": 91
    },
    {
      "epoch": 0.12110574813514699,
      "grad_norm": 0.28870444952258223,
      "learning_rate": 6.590808112105232e-06,
      "loss": 1.954,
      "step": 92
    },
    {
      "epoch": 0.12242211496270294,
      "grad_norm": 0.29485920601939425,
      "learning_rate": 6.3933665983024465e-06,
      "loss": 1.9394,
      "step": 93
    },
    {
      "epoch": 0.12373848179025888,
      "grad_norm": 0.29142694778611705,
      "learning_rate": 6.197528378324664e-06,
      "loss": 1.9168,
      "step": 94
    },
    {
      "epoch": 0.12505484861781482,
      "grad_norm": 0.2832369769565699,
      "learning_rate": 6.003380510149179e-06,
      "loss": 1.9466,
      "step": 95
    },
    {
      "epoch": 0.12637121544537078,
      "grad_norm": 0.2730019638072709,
      "learning_rate": 5.8110093003238175e-06,
      "loss": 1.9697,
      "step": 96
    },
    {
      "epoch": 0.12768758227292673,
      "grad_norm": 0.29650610897698637,
      "learning_rate": 5.620500265600206e-06,
      "loss": 1.9431,
      "step": 97
    },
    {
      "epoch": 0.12900394910048266,
      "grad_norm": 0.28216205906101904,
      "learning_rate": 5.431938094918132e-06,
      "loss": 1.9555,
      "step": 98
    },
    {
      "epoch": 0.1303203159280386,
      "grad_norm": 0.26142115592109544,
      "learning_rate": 5.245406611757882e-06,
      "loss": 1.9546,
      "step": 99
    },
    {
      "epoch": 0.13163668275559456,
      "grad_norm": 0.2663430873063966,
      "learning_rate": 5.060988736877366e-06,
      "loss": 1.9371,
      "step": 100
    },
    {
      "epoch": 0.13295304958315052,
      "grad_norm": 0.27203256121384983,
      "learning_rate": 4.878766451450451e-06,
      "loss": 1.9442,
      "step": 101
    },
    {
      "epoch": 0.13426941641070644,
      "grad_norm": 0.2472440118588447,
      "learning_rate": 4.698820760623064e-06,
      "loss": 1.9397,
      "step": 102
    },
    {
      "epoch": 0.1355857832382624,
      "grad_norm": 0.24129115289522723,
      "learning_rate": 4.5212316575031325e-06,
      "loss": 1.9537,
      "step": 103
    },
    {
      "epoch": 0.13690215006581835,
      "grad_norm": 0.24834249614222498,
      "learning_rate": 4.346078087600411e-06,
      "loss": 1.9582,
      "step": 104
    },
    {
      "epoch": 0.13821851689337428,
      "grad_norm": 0.2343448970716797,
      "learning_rate": 4.173437913732048e-06,
      "loss": 1.9564,
      "step": 105
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 0.22977461940610383,
      "learning_rate": 4.003387881409397e-06,
      "loss": 1.9612,
      "step": 106
    },
    {
      "epoch": 0.14085125054848618,
      "grad_norm": 0.2327764341813503,
      "learning_rate": 3.836003584721577e-06,
      "loss": 1.9613,
      "step": 107
    },
    {
      "epoch": 0.1421676173760421,
      "grad_norm": 0.2361882693796626,
      "learning_rate": 3.6713594327308343e-06,
      "loss": 1.9468,
      "step": 108
    },
    {
      "epoch": 0.14348398420359806,
      "grad_norm": 0.23347458789212797,
      "learning_rate": 3.509528616394716e-06,
      "loss": 1.9658,
      "step": 109
    },
    {
      "epoch": 0.14480035103115402,
      "grad_norm": 0.22028219685938538,
      "learning_rate": 3.3505830760297543e-06,
      "loss": 1.9621,
      "step": 110
    },
    {
      "epoch": 0.14611671785870997,
      "grad_norm": 0.21636698387704192,
      "learning_rate": 3.1945934693310897e-06,
      "loss": 1.9693,
      "step": 111
    },
    {
      "epoch": 0.1474330846862659,
      "grad_norm": 0.22272439284871326,
      "learning_rate": 3.0416291399622834e-06,
      "loss": 1.9952,
      "step": 112
    },
    {
      "epoch": 0.14874945151382185,
      "grad_norm": 0.2240698735197302,
      "learning_rate": 2.891758086729253e-06,
      "loss": 1.9484,
      "step": 113
    },
    {
      "epoch": 0.1500658183413778,
      "grad_norm": 0.23983098594113533,
      "learning_rate": 2.7450469333520856e-06,
      "loss": 1.9546,
      "step": 114
    },
    {
      "epoch": 0.15138218516893373,
      "grad_norm": 0.210678907298948,
      "learning_rate": 2.6015608988480956e-06,
      "loss": 1.9598,
      "step": 115
    },
    {
      "epoch": 0.15269855199648968,
      "grad_norm": 0.2094708658742722,
      "learning_rate": 2.4613637685393433e-06,
      "loss": 1.9693,
      "step": 116
    },
    {
      "epoch": 0.15401491882404564,
      "grad_norm": 0.2086143485980995,
      "learning_rate": 2.324517865697501e-06,
      "loss": 1.9438,
      "step": 117
    },
    {
      "epoch": 0.1553312856516016,
      "grad_norm": 0.21377991371943222,
      "learning_rate": 2.19108402383864e-06,
      "loss": 1.9576,
      "step": 118
    },
    {
      "epoch": 0.15664765247915752,
      "grad_norm": 0.205753248097387,
      "learning_rate": 2.06112155968028e-06,
      "loss": 1.9772,
      "step": 119
    },
    {
      "epoch": 0.15796401930671347,
      "grad_norm": 0.21192191289793974,
      "learning_rate": 1.9346882467727323e-06,
      "loss": 1.9826,
      "step": 120
    },
    {
      "epoch": 0.15928038613426942,
      "grad_norm": 0.21524883648102194,
      "learning_rate": 1.811840289816409e-06,
      "loss": 1.976,
      "step": 121
    },
    {
      "epoch": 0.16059675296182535,
      "grad_norm": 0.20625990022421095,
      "learning_rate": 1.6926322996765899e-06,
      "loss": 1.9776,
      "step": 122
    },
    {
      "epoch": 0.1619131197893813,
      "grad_norm": 0.20310346421796105,
      "learning_rate": 1.5771172691066793e-06,
      "loss": 1.9817,
      "step": 123
    },
    {
      "epoch": 0.16322948661693726,
      "grad_norm": 0.208622897847129,
      "learning_rate": 1.4653465491908003e-06,
      "loss": 2.0059,
      "step": 124
    },
    {
      "epoch": 0.1645458534444932,
      "grad_norm": 0.21517659681508963,
      "learning_rate": 1.3573698265161683e-06,
      "loss": 1.9617,
      "step": 125
    },
    {
      "epoch": 0.16586222027204914,
      "grad_norm": 0.20363550281699117,
      "learning_rate": 1.2532351010853916e-06,
      "loss": 1.9674,
      "step": 126
    },
    {
      "epoch": 0.1671785870996051,
      "grad_norm": 0.2108213644456224,
      "learning_rate": 1.152988664978556e-06,
      "loss": 1.9756,
      "step": 127
    },
    {
      "epoch": 0.16849495392716105,
      "grad_norm": 0.199640605228958,
      "learning_rate": 1.0566750817745076e-06,
      "loss": 1.9827,
      "step": 128
    },
    {
      "epoch": 0.16981132075471697,
      "grad_norm": 0.19250372450550102,
      "learning_rate": 9.6433716674057e-07,
      "loss": 1.9753,
      "step": 129
    },
    {
      "epoch": 0.17112768758227292,
      "grad_norm": 0.2075516417455384,
      "learning_rate": 8.760159677994174e-07,
      "loss": 1.98,
      "step": 130
    },
    {
      "epoch": 0.17244405440982888,
      "grad_norm": 0.20463184448298366,
      "learning_rate": 7.91750747281621e-07,
      "loss": 1.9715,
      "step": 131
    },
    {
      "epoch": 0.1737604212373848,
      "grad_norm": 0.20224889675782617,
      "learning_rate": 7.115789644719728e-07,
      "loss": 1.9926,
      "step": 132
    },
    {
      "epoch": 0.17507678806494076,
      "grad_norm": 0.19807622313564802,
      "learning_rate": 6.355362589573078e-07,
      "loss": 1.9738,
      "step": 133
    },
    {
      "epoch": 0.1763931548924967,
      "grad_norm": 0.20715058257872104,
      "learning_rate": 5.636564347832907e-07,
      "loss": 1.9795,
      "step": 134
    },
    {
      "epoch": 0.17770952172005267,
      "grad_norm": 0.1987868248478531,
      "learning_rate": 4.95971445427137e-07,
      "loss": 1.9809,
      "step": 135
    },
    {
      "epoch": 0.1790258885476086,
      "grad_norm": 0.19761659136898976,
      "learning_rate": 4.3251137959302023e-07,
      "loss": 1.9689,
      "step": 136
    },
    {
      "epoch": 0.18034225537516455,
      "grad_norm": 0.1979731497305122,
      "learning_rate": 3.733044478364234e-07,
      "loss": 1.9649,
      "step": 137
    },
    {
      "epoch": 0.1816586222027205,
      "grad_norm": 0.19563118558862347,
      "learning_rate": 3.1837697002341293e-07,
      "loss": 1.9773,
      "step": 138
    },
    {
      "epoch": 0.18297498903027642,
      "grad_norm": 0.19291494254497765,
      "learning_rate": 2.677533636303964e-07,
      "loss": 1.9676,
      "step": 139
    },
    {
      "epoch": 0.18429135585783238,
      "grad_norm": 0.195403305387489,
      "learning_rate": 2.214561328895748e-07,
      "loss": 1.9723,
      "step": 140
    },
    {
      "epoch": 0.18560772268538833,
      "grad_norm": 0.20351297594650425,
      "learning_rate": 1.7950585878489856e-07,
      "loss": 1.9773,
      "step": 141
    },
    {
      "epoch": 0.18692408951294429,
      "grad_norm": 0.20075344659988453,
      "learning_rate": 1.419211899029971e-07,
      "loss": 1.969,
      "step": 142
    },
    {
      "epoch": 0.1882404563405002,
      "grad_norm": 0.19473013664735456,
      "learning_rate": 1.0871883414312778e-07,
      "loss": 1.9798,
      "step": 143
    },
    {
      "epoch": 0.18955682316805617,
      "grad_norm": 0.19013570239010694,
      "learning_rate": 7.99135512898408e-08,
      "loss": 1.9716,
      "step": 144
    },
    {
      "epoch": 0.19087318999561212,
      "grad_norm": 0.19366203327357978,
      "learning_rate": 5.55181464516652e-08,
      "loss": 1.9733,
      "step": 145
    },
    {
      "epoch": 0.19218955682316805,
      "grad_norm": 0.19674377568766022,
      "learning_rate": 3.554346436871581e-08,
      "loss": 1.969,
      "step": 146
    },
    {
      "epoch": 0.193505923650724,
      "grad_norm": 0.19587314824937796,
      "learning_rate": 1.9998384591773945e-08,
      "loss": 1.9775,
      "step": 147
    },
    {
      "epoch": 0.19482229047827995,
      "grad_norm": 0.1926937742324079,
      "learning_rate": 8.889817534969425e-09,
      "loss": 1.9853,
      "step": 148
    },
    {
      "epoch": 0.1961386573058359,
      "grad_norm": 0.19720577027547917,
      "learning_rate": 2.222701403818972e-09,
      "loss": 1.9742,
      "step": 149
    },
    {
      "epoch": 0.19745502413339183,
      "grad_norm": 0.2111140853212857,
      "learning_rate": 0.0,
      "loss": 1.9861,
      "step": 150
    },
    {
      "epoch": 0.19745502413339183,
      "step": 150,
      "total_flos": 349677715193856.0,
      "train_loss": 1.9152453955014546,
      "train_runtime": 5213.1367,
      "train_samples_per_second": 58.007,
      "train_steps_per_second": 0.029
    }
  ],
  "logging_steps": 1,
  "max_steps": 150,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 349677715193856.0,
  "train_batch_size": 42,
  "trial_name": null,
  "trial_params": null
}