{
  "best_metric": 3.076195240020752,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.07432181345224824,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0003716090672612412, "grad_norm": 89.91339111328125, "learning_rate": 1e-05, "loss": 12.8689, "step": 1 },
    { "epoch": 0.0003716090672612412, "eval_loss": 3.4851932525634766, "eval_runtime": 51.7917, "eval_samples_per_second": 87.504, "eval_steps_per_second": 21.876, "step": 1 },
    { "epoch": 0.0007432181345224824, "grad_norm": 75.49945831298828, "learning_rate": 2e-05, "loss": 13.2491, "step": 2 },
    { "epoch": 0.0011148272017837235, "grad_norm": 100.03617095947266, "learning_rate": 3e-05, "loss": 12.7671, "step": 3 },
    { "epoch": 0.0014864362690449647, "grad_norm": 77.02920532226562, "learning_rate": 4e-05, "loss": 13.5062, "step": 4 },
    { "epoch": 0.0018580453363062058, "grad_norm": 61.53122329711914, "learning_rate": 5e-05, "loss": 13.1202, "step": 5 },
    { "epoch": 0.002229654403567447, "grad_norm": 55.57686996459961, "learning_rate": 6e-05, "loss": 13.0141, "step": 6 },
    { "epoch": 0.002601263470828688, "grad_norm": 50.34519577026367, "learning_rate": 7e-05, "loss": 12.3262, "step": 7 },
    { "epoch": 0.0029728725380899295, "grad_norm": 49.633968353271484, "learning_rate": 8e-05, "loss": 12.7377, "step": 8 },
    { "epoch": 0.0033444816053511705, "grad_norm": 58.044288635253906, "learning_rate": 9e-05, "loss": 13.0712, "step": 9 },
    { "epoch": 0.0037160906726124115, "grad_norm": 55.974037170410156, "learning_rate": 0.0001, "loss": 13.4715, "step": 10 },
    { "epoch": 0.004087699739873653, "grad_norm": 61.226139068603516, "learning_rate": 9.999316524962345e-05, "loss": 13.1595, "step": 11 },
    { "epoch": 0.004459308807134894, "grad_norm": 67.331787109375, "learning_rate": 9.997266286704631e-05, "loss": 12.8342, "step": 12 },
    { "epoch": 0.004830917874396135, "grad_norm": 49.407108306884766, "learning_rate": 9.993849845741524e-05, "loss": 13.2984, "step": 13 },
    { "epoch": 0.005202526941657376, "grad_norm": 70.76209259033203, "learning_rate": 9.989068136093873e-05, "loss": 12.9778, "step": 14 },
    { "epoch": 0.005574136008918618, "grad_norm": 55.556880950927734, "learning_rate": 9.98292246503335e-05, "loss": 13.2762, "step": 15 },
    { "epoch": 0.005945745076179859, "grad_norm": 57.44112777709961, "learning_rate": 9.975414512725057e-05, "loss": 13.4604, "step": 16 },
    { "epoch": 0.0063173541434411, "grad_norm": 61.65849304199219, "learning_rate": 9.966546331768191e-05, "loss": 13.5111, "step": 17 },
    { "epoch": 0.006688963210702341, "grad_norm": 58.80717468261719, "learning_rate": 9.956320346634876e-05, "loss": 12.933, "step": 18 },
    { "epoch": 0.007060572277963582, "grad_norm": 57.13853454589844, "learning_rate": 9.944739353007344e-05, "loss": 13.6066, "step": 19 },
    { "epoch": 0.007432181345224823, "grad_norm": 58.7703742980957, "learning_rate": 9.931806517013612e-05, "loss": 13.801, "step": 20 },
    { "epoch": 0.007803790412486065, "grad_norm": 71.7556381225586, "learning_rate": 9.917525374361912e-05, "loss": 13.8192, "step": 21 },
    { "epoch": 0.008175399479747306, "grad_norm": 56.83869171142578, "learning_rate": 9.901899829374047e-05, "loss": 13.3882, "step": 22 },
    { "epoch": 0.008547008547008548, "grad_norm": 68.50532531738281, "learning_rate": 9.884934153917997e-05, "loss": 13.5063, "step": 23 },
    { "epoch": 0.008918617614269788, "grad_norm": 64.328369140625, "learning_rate": 9.86663298624003e-05, "loss": 13.4745, "step": 24 },
    { "epoch": 0.00929022668153103, "grad_norm": 78.07742309570312, "learning_rate": 9.847001329696653e-05, "loss": 13.2238, "step": 25 },
    { "epoch": 0.00966183574879227, "grad_norm": 70.4471664428711, "learning_rate": 9.826044551386744e-05, "loss": 13.0755, "step": 26 },
    { "epoch": 0.010033444816053512, "grad_norm": 57.33465576171875, "learning_rate": 9.803768380684242e-05, "loss": 13.5927, "step": 27 },
    { "epoch": 0.010405053883314752, "grad_norm": 54.69197463989258, "learning_rate": 9.780178907671789e-05, "loss": 13.4678, "step": 28 },
    { "epoch": 0.010776662950575994, "grad_norm": 60.369022369384766, "learning_rate": 9.755282581475769e-05, "loss": 12.9709, "step": 29 },
    { "epoch": 0.011148272017837236, "grad_norm": 53.06597137451172, "learning_rate": 9.729086208503174e-05, "loss": 13.1473, "step": 30 },
    { "epoch": 0.011519881085098476, "grad_norm": 56.796897888183594, "learning_rate": 9.701596950580806e-05, "loss": 12.8153, "step": 31 },
    { "epoch": 0.011891490152359718, "grad_norm": 55.94731903076172, "learning_rate": 9.672822322997305e-05, "loss": 12.649, "step": 32 },
    { "epoch": 0.012263099219620958, "grad_norm": 60.808738708496094, "learning_rate": 9.642770192448536e-05, "loss": 12.5036, "step": 33 },
    { "epoch": 0.0126347082868822, "grad_norm": 61.68337631225586, "learning_rate": 9.611448774886924e-05, "loss": 12.7729, "step": 34 },
    { "epoch": 0.013006317354143442, "grad_norm": 55.44351577758789, "learning_rate": 9.578866633275288e-05, "loss": 13.3658, "step": 35 },
    { "epoch": 0.013377926421404682, "grad_norm": 60.463138580322266, "learning_rate": 9.545032675245813e-05, "loss": 12.8666, "step": 36 },
    { "epoch": 0.013749535488665924, "grad_norm": 60.136474609375, "learning_rate": 9.509956150664796e-05, "loss": 13.2752, "step": 37 },
    { "epoch": 0.014121144555927164, "grad_norm": 61.477603912353516, "learning_rate": 9.473646649103818e-05, "loss": 13.3831, "step": 38 },
    { "epoch": 0.014492753623188406, "grad_norm": 65.39891052246094, "learning_rate": 9.43611409721806e-05, "loss": 12.3385, "step": 39 },
    { "epoch": 0.014864362690449646, "grad_norm": 48.332157135009766, "learning_rate": 9.397368756032445e-05, "loss": 12.8809, "step": 40 },
    { "epoch": 0.015235971757710888, "grad_norm": 64.07525634765625, "learning_rate": 9.357421218136386e-05, "loss": 13.1503, "step": 41 },
    { "epoch": 0.01560758082497213, "grad_norm": 61.306514739990234, "learning_rate": 9.316282404787871e-05, "loss": 13.6669, "step": 42 },
    { "epoch": 0.015979189892233372, "grad_norm": 57.3792610168457, "learning_rate": 9.273963562927695e-05, "loss": 14.3407, "step": 43 },
    { "epoch": 0.016350798959494612, "grad_norm": 49.27093505859375, "learning_rate": 9.230476262104677e-05, "loss": 13.7153, "step": 44 },
    { "epoch": 0.016722408026755852, "grad_norm": 44.54463577270508, "learning_rate": 9.185832391312644e-05, "loss": 13.31, "step": 45 },
    { "epoch": 0.017094017094017096, "grad_norm": 49.52144241333008, "learning_rate": 9.140044155740101e-05, "loss": 13.5407, "step": 46 },
    { "epoch": 0.017465626161278336, "grad_norm": 59.571197509765625, "learning_rate": 9.093124073433463e-05, "loss": 13.4758, "step": 47 },
    { "epoch": 0.017837235228539576, "grad_norm": 47.71146774291992, "learning_rate": 9.045084971874738e-05, "loss": 13.6893, "step": 48 },
    { "epoch": 0.018208844295800816, "grad_norm": 52.587608337402344, "learning_rate": 8.995939984474624e-05, "loss": 15.3526, "step": 49 },
    { "epoch": 0.01858045336306206, "grad_norm": 45.097713470458984, "learning_rate": 8.945702546981969e-05, "loss": 15.5272, "step": 50 },
    { "epoch": 0.01858045336306206, "eval_loss": 3.256963014602661, "eval_runtime": 51.9824, "eval_samples_per_second": 87.183, "eval_steps_per_second": 21.796, "step": 50 },
    { "epoch": 0.0189520624303233, "grad_norm": 20.3822021484375, "learning_rate": 8.894386393810563e-05, "loss": 12.8168, "step": 51 },
    { "epoch": 0.01932367149758454, "grad_norm": 22.23453712463379, "learning_rate": 8.842005554284296e-05, "loss": 12.9706, "step": 52 },
    { "epoch": 0.019695280564845784, "grad_norm": 21.059741973876953, "learning_rate": 8.788574348801675e-05, "loss": 12.7409, "step": 53 },
    { "epoch": 0.020066889632107024, "grad_norm": 20.219327926635742, "learning_rate": 8.73410738492077e-05, "loss": 13.0011, "step": 54 },
    { "epoch": 0.020438498699368264, "grad_norm": 23.87364959716797, "learning_rate": 8.678619553365659e-05, "loss": 12.6852, "step": 55 },
    { "epoch": 0.020810107766629504, "grad_norm": 20.999147415161133, "learning_rate": 8.622126023955446e-05, "loss": 12.8306, "step": 56 },
    { "epoch": 0.021181716833890748, "grad_norm": 21.016027450561523, "learning_rate": 8.564642241456986e-05, "loss": 12.928, "step": 57 },
    { "epoch": 0.021553325901151988, "grad_norm": 21.58234405517578, "learning_rate": 8.506183921362443e-05, "loss": 13.0229, "step": 58 },
    { "epoch": 0.021924934968413228, "grad_norm": 22.88654136657715, "learning_rate": 8.44676704559283e-05, "loss": 12.0128, "step": 59 },
    { "epoch": 0.022296544035674472, "grad_norm": 20.62936019897461, "learning_rate": 8.386407858128706e-05, "loss": 12.3145, "step": 60 },
    { "epoch": 0.022668153102935712, "grad_norm": 22.425981521606445, "learning_rate": 8.32512286056924e-05, "loss": 12.3533, "step": 61 },
    { "epoch": 0.023039762170196952, "grad_norm": 21.995954513549805, "learning_rate": 8.262928807620843e-05, "loss": 12.8239, "step": 62 },
    { "epoch": 0.023411371237458192, "grad_norm": 23.067846298217773, "learning_rate": 8.199842702516583e-05, "loss": 13.1718, "step": 63 },
    { "epoch": 0.023782980304719436, "grad_norm": 27.200054168701172, "learning_rate": 8.135881792367686e-05, "loss": 12.663, "step": 64 },
    { "epoch": 0.024154589371980676, "grad_norm": 23.297252655029297, "learning_rate": 8.07106356344834e-05, "loss": 12.2497, "step": 65 },
    { "epoch": 0.024526198439241916, "grad_norm": 25.43965721130371, "learning_rate": 8.005405736415126e-05, "loss": 12.2888, "step": 66 },
    { "epoch": 0.02489780750650316, "grad_norm": 24.800180435180664, "learning_rate": 7.938926261462366e-05, "loss": 11.9027, "step": 67 },
    { "epoch": 0.0252694165737644, "grad_norm": 26.041688919067383, "learning_rate": 7.871643313414718e-05, "loss": 12.3735, "step": 68 },
    { "epoch": 0.02564102564102564, "grad_norm": 24.485660552978516, "learning_rate": 7.803575286758364e-05, "loss": 12.9005, "step": 69 },
    { "epoch": 0.026012634708286884, "grad_norm": 27.68210220336914, "learning_rate": 7.734740790612136e-05, "loss": 12.2957, "step": 70 },
    { "epoch": 0.026384243775548124, "grad_norm": 27.342363357543945, "learning_rate": 7.66515864363997e-05, "loss": 12.4048, "step": 71 },
    { "epoch": 0.026755852842809364, "grad_norm": 26.84868812561035, "learning_rate": 7.594847868906076e-05, "loss": 12.5084, "step": 72 },
    { "epoch": 0.027127461910070604, "grad_norm": 28.721134185791016, "learning_rate": 7.52382768867422e-05, "loss": 12.3547, "step": 73 },
    { "epoch": 0.027499070977331848, "grad_norm": 30.7074031829834, "learning_rate": 7.452117519152542e-05, "loss": 12.5016, "step": 74 },
    { "epoch": 0.027870680044593088, "grad_norm": 30.42992401123047, "learning_rate": 7.379736965185368e-05, "loss": 12.6369, "step": 75 },
    { "epoch": 0.028242289111854328, "grad_norm": 30.07449722290039, "learning_rate": 7.30670581489344e-05, "loss": 12.439, "step": 76 },
    { "epoch": 0.028613898179115572, "grad_norm": 27.86932373046875, "learning_rate": 7.233044034264034e-05, "loss": 13.5454, "step": 77 },
    { "epoch": 0.028985507246376812, "grad_norm": 24.85785484313965, "learning_rate": 7.158771761692464e-05, "loss": 12.5962, "step": 78 },
    { "epoch": 0.029357116313638052, "grad_norm": 27.810264587402344, "learning_rate": 7.083909302476453e-05, "loss": 12.9454, "step": 79 },
    { "epoch": 0.029728725380899292, "grad_norm": 30.700931549072266, "learning_rate": 7.008477123264848e-05, "loss": 11.9123, "step": 80 },
    { "epoch": 0.030100334448160536, "grad_norm": 30.442121505737305, "learning_rate": 6.932495846462261e-05, "loss": 11.9333, "step": 81 },
    { "epoch": 0.030471943515421776, "grad_norm": 35.74739074707031, "learning_rate": 6.855986244591104e-05, "loss": 11.866, "step": 82 },
    { "epoch": 0.030843552582683016, "grad_norm": 33.87689208984375, "learning_rate": 6.778969234612584e-05, "loss": 12.5872, "step": 83 },
    { "epoch": 0.03121516164994426, "grad_norm": 39.369693756103516, "learning_rate": 6.701465872208216e-05, "loss": 12.3437, "step": 84 },
    { "epoch": 0.031586770717205497, "grad_norm": 37.18006896972656, "learning_rate": 6.623497346023418e-05, "loss": 12.5386, "step": 85 },
    { "epoch": 0.031958379784466744, "grad_norm": 43.344722747802734, "learning_rate": 6.545084971874738e-05, "loss": 11.9742, "step": 86 },
    { "epoch": 0.032329988851727984, "grad_norm": 38.42266845703125, "learning_rate": 6.466250186922325e-05, "loss": 12.0524, "step": 87 },
    { "epoch": 0.032701597918989224, "grad_norm": 36.284507751464844, "learning_rate": 6.387014543809223e-05, "loss": 12.3979, "step": 88 },
    { "epoch": 0.033073206986250464, "grad_norm": 42.46220016479492, "learning_rate": 6.307399704769099e-05, "loss": 11.8786, "step": 89 },
    { "epoch": 0.033444816053511704, "grad_norm": 39.43769836425781, "learning_rate": 6.227427435703997e-05, "loss": 11.391, "step": 90 },
    { "epoch": 0.033816425120772944, "grad_norm": 39.12789535522461, "learning_rate": 6.147119600233758e-05, "loss": 11.8605, "step": 91 },
    { "epoch": 0.03418803418803419, "grad_norm": 41.41144561767578, "learning_rate": 6.066498153718735e-05, "loss": 12.5651, "step": 92 },
    { "epoch": 0.03455964325529543, "grad_norm": 42.6218376159668, "learning_rate": 5.985585137257401e-05, "loss": 12.6488, "step": 93 },
    { "epoch": 0.03493125232255667, "grad_norm": 51.30817413330078, "learning_rate": 5.90440267166055e-05, "loss": 12.5156, "step": 94 },
    { "epoch": 0.03530286138981791, "grad_norm": 48.467193603515625, "learning_rate": 5.8229729514036705e-05, "loss": 12.2167, "step": 95 },
    { "epoch": 0.03567447045707915, "grad_norm": 51.6811408996582, "learning_rate": 5.74131823855921e-05, "loss": 12.9945, "step": 96 },
    { "epoch": 0.03604607952434039, "grad_norm": 48.028831481933594, "learning_rate": 5.6594608567103456e-05, "loss": 13.3435, "step": 97 },
    { "epoch": 0.03641768859160163, "grad_norm": 50.69572448730469, "learning_rate": 5.577423184847932e-05, "loss": 13.6653, "step": 98 },
    { "epoch": 0.03678929765886288, "grad_norm": 44.864540100097656, "learning_rate": 5.495227651252315e-05, "loss": 14.7863, "step": 99 },
    { "epoch": 0.03716090672612412, "grad_norm": 47.65779495239258, "learning_rate": 5.4128967273616625e-05, "loss": 14.949, "step": 100 },
    { "epoch": 0.03716090672612412, "eval_loss": 3.154662609100342, "eval_runtime": 52.0726, "eval_samples_per_second": 87.032, "eval_steps_per_second": 21.758, "step": 100 },
    { "epoch": 0.03753251579338536, "grad_norm": 28.16967010498047, "learning_rate": 5.330452921628497e-05, "loss": 12.9067, "step": 101 },
    { "epoch": 0.0379041248606466, "grad_norm": 30.37758445739746, "learning_rate": 5.247918773366112e-05, "loss": 12.7569, "step": 102 },
    { "epoch": 0.03827573392790784, "grad_norm": 24.201208114624023, "learning_rate": 5.165316846586541e-05, "loss": 12.8335, "step": 103 },
    { "epoch": 0.03864734299516908, "grad_norm": 23.62079429626465, "learning_rate": 5.0826697238317935e-05, "loss": 12.2592, "step": 104 },
    { "epoch": 0.03901895206243032, "grad_norm": 22.814987182617188, "learning_rate": 5e-05, "loss": 12.4559, "step": 105 },
    { "epoch": 0.03939056112969157, "grad_norm": 21.530044555664062, "learning_rate": 4.917330276168208e-05, "loss": 12.5896, "step": 106 },
    { "epoch": 0.03976217019695281, "grad_norm": 23.73486328125, "learning_rate": 4.834683153413459e-05, "loss": 12.3527, "step": 107 },
    { "epoch": 0.04013377926421405, "grad_norm": 21.59313201904297, "learning_rate": 4.7520812266338885e-05, "loss": 12.1843, "step": 108 },
    { "epoch": 0.04050538833147529, "grad_norm": 23.252063751220703, "learning_rate": 4.669547078371504e-05, "loss": 12.442, "step": 109 },
    { "epoch": 0.04087699739873653, "grad_norm": 23.04511833190918, "learning_rate": 4.5871032726383386e-05, "loss": 12.0954, "step": 110 },
    { "epoch": 0.04124860646599777, "grad_norm": 21.15141487121582, "learning_rate": 4.504772348747687e-05, "loss": 12.3417, "step": 111 },
    { "epoch": 0.04162021553325901, "grad_norm": 22.450578689575195, "learning_rate": 4.4225768151520694e-05, "loss": 12.1134, "step": 112 },
    { "epoch": 0.041991824600520256, "grad_norm": 23.66915512084961, "learning_rate": 4.3405391432896555e-05, "loss": 12.4642, "step": 113 },
    { "epoch": 0.042363433667781496, "grad_norm": 24.237220764160156, "learning_rate": 4.2586817614407895e-05, "loss": 12.2302, "step": 114 },
    { "epoch": 0.042735042735042736, "grad_norm": 22.29314422607422, "learning_rate": 4.17702704859633e-05, "loss": 12.2421, "step": 115 },
    { "epoch": 0.043106651802303976, "grad_norm": 25.172727584838867, "learning_rate": 4.095597328339452e-05, "loss": 12.1878, "step": 116 },
    { "epoch": 0.043478260869565216, "grad_norm": 26.602157592773438, "learning_rate": 4.0144148627425993e-05, "loss": 12.2879, "step": 117 },
    { "epoch": 0.043849869936826456, "grad_norm": 24.090085983276367, "learning_rate": 3.933501846281267e-05, "loss": 12.4924, "step": 118 },
    { "epoch": 0.044221479004087696, "grad_norm": 26.16728973388672, "learning_rate": 3.852880399766243e-05, "loss": 12.145, "step": 119 },
    { "epoch": 0.044593088071348944, "grad_norm": 27.40218734741211, "learning_rate": 3.772572564296005e-05, "loss": 11.8146, "step": 120 },
    { "epoch": 0.044964697138610184, "grad_norm": 24.844179153442383, "learning_rate": 3.6926002952309016e-05, "loss": 12.1576, "step": 121 },
    { "epoch": 0.045336306205871424, "grad_norm": 25.813631057739258, "learning_rate": 3.612985456190778e-05, "loss": 12.2498, "step": 122 },
    { "epoch": 0.045707915273132664, "grad_norm": 27.253564834594727, "learning_rate": 3.533749813077677e-05, "loss": 13.0846, "step": 123 },
    { "epoch": 0.046079524340393904, "grad_norm": 30.898393630981445, "learning_rate": 3.4549150281252636e-05, "loss": 12.5038, "step": 124 },
    { "epoch": 0.046451133407655144, "grad_norm": 28.49885368347168, "learning_rate": 3.3765026539765834e-05, "loss": 12.0529, "step": 125 },
    { "epoch": 0.046822742474916385, "grad_norm": 33.739097595214844, "learning_rate": 3.298534127791785e-05, "loss": 12.4644, "step": 126 },
    { "epoch": 0.04719435154217763, "grad_norm": 27.413183212280273, "learning_rate": 3.221030765387417e-05, "loss": 11.8988, "step": 127 },
    { "epoch": 0.04756596060943887, "grad_norm": 28.41815757751465, "learning_rate": 3.144013755408895e-05, "loss": 12.3175, "step": 128 },
    { "epoch": 0.04793756967670011, "grad_norm": 24.673856735229492, "learning_rate": 3.0675041535377405e-05, "loss": 11.0224, "step": 129 },
    { "epoch": 0.04830917874396135, "grad_norm": 29.299766540527344, "learning_rate": 2.991522876735154e-05, "loss": 12.2135, "step": 130 },
    { "epoch": 0.04868078781122259, "grad_norm": 26.782230377197266, "learning_rate": 2.916090697523549e-05, "loss": 12.197, "step": 131 },
    { "epoch": 0.04905239687848383, "grad_norm": 40.575321197509766, "learning_rate": 2.8412282383075363e-05, "loss": 12.0718, "step": 132 },
    { "epoch": 0.04942400594574508, "grad_norm": 24.29773712158203, "learning_rate": 2.766955965735968e-05, "loss": 12.2244, "step": 133 },
    { "epoch": 0.04979561501300632, "grad_norm": 25.015186309814453, "learning_rate": 2.693294185106562e-05, "loss": 12.3849, "step": 134 },
    { "epoch": 0.05016722408026756, "grad_norm": 36.70491409301758, "learning_rate": 2.6202630348146324e-05, "loss": 11.7532, "step": 135 },
    { "epoch": 0.0505388331475288, "grad_norm": 29.158506393432617, "learning_rate": 2.547882480847461e-05, "loss": 12.1616, "step": 136 },
    { "epoch": 0.05091044221479004, "grad_norm": 25.96609115600586, "learning_rate": 2.476172311325783e-05, "loss": 12.0101, "step": 137 },
    { "epoch": 0.05128205128205128, "grad_norm": 32.55073547363281, "learning_rate": 2.405152131093926e-05, "loss": 12.3175, "step": 138 },
    { "epoch": 0.05165366034931252, "grad_norm": 28.77853775024414, "learning_rate": 2.3348413563600325e-05, "loss": 12.2389, "step": 139 },
    { "epoch": 0.05202526941657377, "grad_norm": 29.920581817626953, "learning_rate": 2.2652592093878666e-05, "loss": 11.7101, "step": 140 },
    { "epoch": 0.05239687848383501, "grad_norm": 30.31206512451172, "learning_rate": 2.196424713241637e-05, "loss": 12.373, "step": 141 },
    { "epoch": 0.05276848755109625, "grad_norm": 34.891883850097656, "learning_rate": 2.128356686585282e-05, "loss": 12.0662, "step": 142 },
    { "epoch": 0.05314009661835749, "grad_norm": 40.02173614501953, "learning_rate": 2.061073738537635e-05, "loss": 12.6474, "step": 143 },
    { "epoch": 0.05351170568561873, "grad_norm": 36.588050842285156, "learning_rate": 1.9945942635848748e-05, "loss": 13.1617, "step": 144 },
    { "epoch": 0.05388331475287997, "grad_norm": 34.155696868896484, "learning_rate": 1.928936436551661e-05, "loss": 12.7202, "step": 145 },
    { "epoch": 0.05425492382014121, "grad_norm": 33.35672378540039, "learning_rate": 1.8641182076323148e-05, "loss": 11.7438, "step": 146 },
    { "epoch": 0.054626532887402456, "grad_norm": 34.478858947753906, "learning_rate": 1.800157297483417e-05, "loss": 13.309, "step": 147 },
    { "epoch": 0.054998141954663696, "grad_norm": 42.10382843017578, "learning_rate": 1.7370711923791567e-05, "loss": 13.2585, "step": 148 },
    { "epoch": 0.055369751021924936, "grad_norm": 32.687721252441406, "learning_rate": 1.6748771394307585e-05, "loss": 15.1013, "step": 149 },
    { "epoch": 0.055741360089186176, "grad_norm": 35.498931884765625, "learning_rate": 1.6135921418712956e-05, "loss": 15.1974, "step": 150 },
    { "epoch": 0.055741360089186176, "eval_loss": 3.0939109325408936, "eval_runtime": 52.034, "eval_samples_per_second": 87.097, "eval_steps_per_second": 21.774, "step": 150 },
    { "epoch": 0.056112969156447416, "grad_norm": 18.009008407592773, "learning_rate": 1.553232954407171e-05, "loss": 12.5073, "step": 151 },
    { "epoch": 0.056484578223708656, "grad_norm": 19.124990463256836, "learning_rate": 1.4938160786375572e-05, "loss": 12.8545, "step": 152 },
    { "epoch": 0.056856187290969896, "grad_norm": 19.760417938232422, "learning_rate": 1.435357758543015e-05, "loss": 12.2158, "step": 153 },
    { "epoch": 0.057227796358231144, "grad_norm": 19.974246978759766, "learning_rate": 1.3778739760445552e-05, "loss": 12.0157, "step": 154 },
    { "epoch": 0.057599405425492384, "grad_norm": 19.707813262939453, "learning_rate": 1.3213804466343421e-05, "loss": 12.3223, "step": 155 },
    { "epoch": 0.057971014492753624, "grad_norm": 19.958818435668945, "learning_rate": 1.2658926150792322e-05, "loss": 12.4417, "step": 156 },
    { "epoch": 0.058342623560014864, "grad_norm": 24.03920555114746, "learning_rate": 1.2114256511983274e-05, "loss": 12.352, "step": 157 },
    { "epoch": 0.058714232627276104, "grad_norm": 20.127941131591797, "learning_rate": 1.157994445715706e-05, "loss": 12.2601, "step": 158 },
    { "epoch": 0.059085841694537344, "grad_norm": 21.126739501953125, "learning_rate": 1.1056136061894384e-05, "loss": 12.4955, "step": 159 },
    { "epoch": 0.059457450761798585, "grad_norm": 21.151691436767578, "learning_rate": 1.0542974530180327e-05, "loss": 11.9164, "step": 160 },
    { "epoch": 0.05982905982905983, "grad_norm": 20.782501220703125, "learning_rate": 1.0040600155253765e-05, "loss": 12.0629, "step": 161 },
    { "epoch": 0.06020066889632107, "grad_norm": 21.038799285888672, "learning_rate": 9.549150281252633e-06, "loss": 12.1151, "step": 162 },
    { "epoch": 0.06057227796358231, "grad_norm": 21.63349151611328, "learning_rate": 9.068759265665384e-06, "loss": 12.1234, "step": 163 },
    { "epoch": 0.06094388703084355, "grad_norm": 20.985212326049805, "learning_rate": 8.599558442598998e-06, "loss": 12.1586, "step": 164 },
    { "epoch": 0.06131549609810479, "grad_norm": 20.65945816040039, "learning_rate": 8.141676086873572e-06, "loss": 11.9846, "step": 165 },
    { "epoch": 0.06168710516536603, "grad_norm": 19.89647674560547, "learning_rate": 7.695237378953223e-06, "loss": 12.38, "step": 166 },
    { "epoch": 0.06205871423262728, "grad_norm": 18.34578514099121, "learning_rate": 7.260364370723044e-06, "loss": 11.904, "step": 167 },
    { "epoch": 0.06243032329988852, "grad_norm": 20.550670623779297, "learning_rate": 6.837175952121306e-06, "loss": 11.9591, "step": 168 },
    { "epoch": 0.06280193236714976, "grad_norm": 22.544872283935547, "learning_rate": 6.425787818636131e-06, "loss": 11.61, "step": 169 },
    { "epoch": 0.06317354143441099, "grad_norm": 21.69040298461914, "learning_rate": 6.026312439675552e-06, "loss": 12.3752, "step": 170 },
    { "epoch": 0.06354515050167224, "grad_norm": 23.17534828186035, "learning_rate": 5.6388590278194096e-06, "loss": 11.8754, "step": 171 },
    { "epoch": 0.06391675956893349, "grad_norm": 24.032636642456055, "learning_rate": 5.263533508961827e-06, "loss": 12.1612, "step": 172 },
    { "epoch": 0.06428836863619472, "grad_norm": 25.65900421142578, "learning_rate": 4.900438493352055e-06, "loss": 12.5446, "step": 173 },
    { "epoch": 0.06465997770345597, "grad_norm": 22.62616539001465, "learning_rate": 4.549673247541875e-06, "loss": 12.5365, "step": 174 },
    { "epoch": 0.0650315867707172, "grad_norm": 22.821971893310547, "learning_rate": 4.2113336672471245e-06, "loss": 11.96, "step": 175 },
    { "epoch": 0.06540319583797845, "grad_norm": 25.016874313354492, "learning_rate": 3.885512251130763e-06, "loss": 12.2364, "step": 176 },
    { "epoch": 0.06577480490523968, "grad_norm": 21.750635147094727, "learning_rate": 3.5722980755146517e-06, "loss": 12.28, "step": 177 },
    { "epoch": 0.06614641397250093, "grad_norm": 23.131282806396484, "learning_rate": 3.271776770026963e-06, "loss": 12.5989, "step": 178 },
    { "epoch": 0.06651802303976218, "grad_norm": 30.947845458984375, "learning_rate": 2.9840304941919415e-06, "loss": 12.5971, "step": 179 },
    { "epoch": 0.06688963210702341, "grad_norm": 24.951862335205078, "learning_rate": 2.7091379149682685e-06, "loss": 11.8475, "step": 180 },
    { "epoch": 0.06726124117428466, "grad_norm": 26.0683536529541, "learning_rate": 2.4471741852423237e-06, "loss": 12.5458, "step": 181 },
    { "epoch": 0.06763285024154589, "grad_norm": 23.516315460205078, "learning_rate": 2.1982109232821178e-06, "loss": 11.5941, "step": 182 },
    { "epoch": 0.06800445930880714, "grad_norm": 27.001205444335938, "learning_rate": 1.962316193157593e-06, "loss": 12.2687, "step": 183 },
    { "epoch": 0.06837606837606838, "grad_norm": 27.13043212890625, "learning_rate": 1.7395544861325718e-06, "loss": 11.9019, "step": 184 },
    { "epoch": 0.06874767744332962, "grad_norm": 24.126502990722656, "learning_rate": 1.5299867030334814e-06, "loss": 11.2569, "step": 185 },
    { "epoch": 0.06911928651059086, "grad_norm": 27.644119262695312, "learning_rate": 1.333670137599713e-06, "loss": 12.1752, "step": 186 },
    { "epoch": 0.0694908955778521, "grad_norm": 28.741046905517578, "learning_rate": 1.1506584608200367e-06, "loss": 12.3401, "step": 187 },
    { "epoch": 0.06986250464511334, "grad_norm": 29.9339542388916, "learning_rate": 9.810017062595322e-07, "loss": 12.243, "step": 188 },
    { "epoch": 0.07023411371237458, "grad_norm": 28.82630157470703, "learning_rate": 8.247462563808817e-07, "loss": 12.8568, "step": 189 },
    { "epoch": 0.07060572277963582, "grad_norm": 36.50189208984375, "learning_rate": 6.819348298638839e-07, "loss": 12.9596, "step": 190 },
    { "epoch": 0.07097733184689707, "grad_norm": 37.89533233642578, "learning_rate": 5.526064699265753e-07, "loss": 13.0177, "step": 191 },
    { "epoch": 0.0713489409141583, "grad_norm": 29.354293823242188, "learning_rate": 4.367965336512403e-07, "loss": 11.5076, "step": 192 },
    { "epoch": 0.07172054998141955, "grad_norm": 45.329193115234375, "learning_rate": 3.3453668231809286e-07, "loss": 11.8622, "step": 193 },
    { "epoch": 0.07209215904868078, "grad_norm": 34.94525909423828, "learning_rate": 2.458548727494292e-07, "loss": 12.3988, "step": 194 },
    { "epoch": 0.07246376811594203, "grad_norm": 29.820608139038086, "learning_rate": 1.7077534966650766e-07, "loss": 12.1844, "step": 195 },
    { "epoch": 0.07283537718320326, "grad_norm": 30.504676818847656, "learning_rate": 1.0931863906127327e-07, "loss": 12.6609, "step": 196 },
    { "epoch": 0.07320698625046451, "grad_norm": 31.953720092773438, "learning_rate": 6.150154258476315e-08, "loss": 12.272, "step": 197 },
    { "epoch": 0.07357859531772576, "grad_norm": 41.56123733520508, "learning_rate": 2.7337132953697554e-08, "loss": 13.9901, "step": 198 },
    { "epoch": 0.07395020438498699, "grad_norm": 36.15668869018555, "learning_rate": 6.834750376549792e-09, "loss": 13.9894, "step": 199 },
    { "epoch": 0.07432181345224824, "grad_norm": 40.40428161621094, "learning_rate": 0.0, "loss": 15.0873, "step": 200 },
    { "epoch": 0.07432181345224824, "eval_loss": 3.076195240020752, "eval_runtime": 51.9862, "eval_samples_per_second": 87.177, "eval_steps_per_second": 21.794, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4939692021579776e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}