{
  "best_metric": 0.6734878420829773,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.36363636363636365,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018181818181818182,
      "grad_norm": 138.68397521972656,
      "learning_rate": 5e-06,
      "loss": 14.5689,
      "step": 1
    },
    {
      "epoch": 0.0018181818181818182,
      "eval_loss": 4.257202625274658,
      "eval_runtime": 15.5048,
      "eval_samples_per_second": 59.788,
      "eval_steps_per_second": 29.926,
      "step": 1
    },
    {
      "epoch": 0.0036363636363636364,
      "grad_norm": 122.03857421875,
      "learning_rate": 1e-05,
      "loss": 15.5416,
      "step": 2
    },
    {
      "epoch": 0.005454545454545455,
      "grad_norm": 139.68202209472656,
      "learning_rate": 1.5e-05,
      "loss": 16.1754,
      "step": 3
    },
    {
      "epoch": 0.007272727272727273,
      "grad_norm": 154.2438507080078,
      "learning_rate": 2e-05,
      "loss": 15.2385,
      "step": 4
    },
    {
      "epoch": 0.00909090909090909,
      "grad_norm": 146.93878173828125,
      "learning_rate": 2.5e-05,
      "loss": 15.5613,
      "step": 5
    },
    {
      "epoch": 0.01090909090909091,
      "grad_norm": 120.0749282836914,
      "learning_rate": 3e-05,
      "loss": 14.4842,
      "step": 6
    },
    {
      "epoch": 0.012727272727272728,
      "grad_norm": 126.85943603515625,
      "learning_rate": 3.5e-05,
      "loss": 13.4979,
      "step": 7
    },
    {
      "epoch": 0.014545454545454545,
      "grad_norm": 175.2422637939453,
      "learning_rate": 4e-05,
      "loss": 12.2678,
      "step": 8
    },
    {
      "epoch": 0.016363636363636365,
      "grad_norm": 129.5550537109375,
      "learning_rate": 4.5e-05,
      "loss": 11.7683,
      "step": 9
    },
    {
      "epoch": 0.01818181818181818,
      "grad_norm": 114.66398620605469,
      "learning_rate": 5e-05,
      "loss": 10.7645,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 109.88460540771484,
      "learning_rate": 5.500000000000001e-05,
      "loss": 9.8579,
      "step": 11
    },
    {
      "epoch": 0.02181818181818182,
      "grad_norm": 122.863037109375,
      "learning_rate": 6e-05,
      "loss": 9.304,
      "step": 12
    },
    {
      "epoch": 0.023636363636363636,
      "grad_norm": 123.73020935058594,
      "learning_rate": 6.500000000000001e-05,
      "loss": 8.3192,
      "step": 13
    },
    {
      "epoch": 0.025454545454545455,
      "grad_norm": 147.00076293945312,
      "learning_rate": 7e-05,
      "loss": 7.907,
      "step": 14
    },
    {
      "epoch": 0.02727272727272727,
      "grad_norm": 338.5273132324219,
      "learning_rate": 7.500000000000001e-05,
      "loss": 8.9278,
      "step": 15
    },
    {
      "epoch": 0.02909090909090909,
      "grad_norm": 143.44017028808594,
      "learning_rate": 8e-05,
      "loss": 7.7997,
      "step": 16
    },
    {
      "epoch": 0.03090909090909091,
      "grad_norm": 175.75901794433594,
      "learning_rate": 8.5e-05,
      "loss": 7.0567,
      "step": 17
    },
    {
      "epoch": 0.03272727272727273,
      "grad_norm": 161.01739501953125,
      "learning_rate": 9e-05,
      "loss": 7.9452,
      "step": 18
    },
    {
      "epoch": 0.034545454545454546,
      "grad_norm": 217.87303161621094,
      "learning_rate": 9.5e-05,
      "loss": 8.277,
      "step": 19
    },
    {
      "epoch": 0.03636363636363636,
      "grad_norm": 193.74627685546875,
      "learning_rate": 0.0001,
      "loss": 7.6536,
      "step": 20
    },
    {
      "epoch": 0.038181818181818185,
      "grad_norm": 253.91064453125,
      "learning_rate": 9.999238475781957e-05,
      "loss": 6.6774,
      "step": 21
    },
    {
      "epoch": 0.04,
      "grad_norm": 213.30625915527344,
      "learning_rate": 9.99695413509548e-05,
      "loss": 6.5032,
      "step": 22
    },
    {
      "epoch": 0.04181818181818182,
      "grad_norm": 161.59420776367188,
      "learning_rate": 9.99314767377287e-05,
      "loss": 6.4562,
      "step": 23
    },
    {
      "epoch": 0.04363636363636364,
      "grad_norm": 163.30369567871094,
      "learning_rate": 9.987820251299122e-05,
      "loss": 6.051,
      "step": 24
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 164.57496643066406,
      "learning_rate": 9.980973490458728e-05,
      "loss": 5.5738,
      "step": 25
    },
    {
      "epoch": 0.04727272727272727,
      "grad_norm": 137.8650360107422,
      "learning_rate": 9.972609476841367e-05,
      "loss": 5.9991,
      "step": 26
    },
    {
      "epoch": 0.04909090909090909,
      "grad_norm": 147.88990783691406,
      "learning_rate": 9.962730758206611e-05,
      "loss": 5.2916,
      "step": 27
    },
    {
      "epoch": 0.05090909090909091,
      "grad_norm": 175.40298461914062,
      "learning_rate": 9.951340343707852e-05,
      "loss": 5.123,
      "step": 28
    },
    {
      "epoch": 0.05272727272727273,
      "grad_norm": 143.73516845703125,
      "learning_rate": 9.938441702975689e-05,
      "loss": 4.696,
      "step": 29
    },
    {
      "epoch": 0.05454545454545454,
      "grad_norm": 162.42837524414062,
      "learning_rate": 9.924038765061042e-05,
      "loss": 4.4559,
      "step": 30
    },
    {
      "epoch": 0.056363636363636366,
      "grad_norm": 138.40577697753906,
      "learning_rate": 9.908135917238321e-05,
      "loss": 5.1119,
      "step": 31
    },
    {
      "epoch": 0.05818181818181818,
      "grad_norm": 152.168212890625,
      "learning_rate": 9.890738003669029e-05,
      "loss": 4.7344,
      "step": 32
    },
    {
      "epoch": 0.06,
      "grad_norm": 221.61441040039062,
      "learning_rate": 9.871850323926177e-05,
      "loss": 4.8725,
      "step": 33
    },
    {
      "epoch": 0.06181818181818182,
      "grad_norm": 134.0053253173828,
      "learning_rate": 9.851478631379982e-05,
      "loss": 4.1396,
      "step": 34
    },
    {
      "epoch": 0.06363636363636363,
      "grad_norm": 169.15768432617188,
      "learning_rate": 9.829629131445342e-05,
      "loss": 4.7024,
      "step": 35
    },
    {
      "epoch": 0.06545454545454546,
      "grad_norm": 295.5130310058594,
      "learning_rate": 9.806308479691595e-05,
      "loss": 4.3401,
      "step": 36
    },
    {
      "epoch": 0.06727272727272728,
      "grad_norm": 170.70733642578125,
      "learning_rate": 9.781523779815179e-05,
      "loss": 5.3419,
      "step": 37
    },
    {
      "epoch": 0.06909090909090909,
      "grad_norm": 150.62545776367188,
      "learning_rate": 9.755282581475769e-05,
      "loss": 4.5806,
      "step": 38
    },
    {
      "epoch": 0.07090909090909091,
      "grad_norm": 159.96763610839844,
      "learning_rate": 9.727592877996585e-05,
      "loss": 4.1185,
      "step": 39
    },
    {
      "epoch": 0.07272727272727272,
      "grad_norm": 145.9441680908203,
      "learning_rate": 9.698463103929542e-05,
      "loss": 4.1673,
      "step": 40
    },
    {
      "epoch": 0.07454545454545454,
      "grad_norm": 107.2767333984375,
      "learning_rate": 9.667902132486009e-05,
      "loss": 4.7757,
      "step": 41
    },
    {
      "epoch": 0.07636363636363637,
      "grad_norm": 204.49026489257812,
      "learning_rate": 9.635919272833938e-05,
      "loss": 4.7596,
      "step": 42
    },
    {
      "epoch": 0.07818181818181819,
      "grad_norm": 115.9788818359375,
      "learning_rate": 9.602524267262203e-05,
      "loss": 5.3676,
      "step": 43
    },
    {
      "epoch": 0.08,
      "grad_norm": 70.77046966552734,
      "learning_rate": 9.567727288213005e-05,
      "loss": 3.8195,
      "step": 44
    },
    {
      "epoch": 0.08181818181818182,
      "grad_norm": 74.69246673583984,
      "learning_rate": 9.53153893518325e-05,
      "loss": 4.8097,
      "step": 45
    },
    {
      "epoch": 0.08363636363636363,
      "grad_norm": 90.72003173828125,
      "learning_rate": 9.493970231495835e-05,
      "loss": 5.028,
      "step": 46
    },
    {
      "epoch": 0.08545454545454545,
      "grad_norm": 85.53260803222656,
      "learning_rate": 9.45503262094184e-05,
      "loss": 4.1861,
      "step": 47
    },
    {
      "epoch": 0.08727272727272728,
      "grad_norm": 90.01811981201172,
      "learning_rate": 9.414737964294636e-05,
      "loss": 4.2923,
      "step": 48
    },
    {
      "epoch": 0.0890909090909091,
      "grad_norm": 103.90205383300781,
      "learning_rate": 9.373098535696979e-05,
      "loss": 4.0331,
      "step": 49
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 54.6790657043457,
      "learning_rate": 9.330127018922194e-05,
      "loss": 5.0269,
      "step": 50
    },
    {
      "epoch": 0.09090909090909091,
      "eval_loss": 1.0066642761230469,
      "eval_runtime": 15.4751,
      "eval_samples_per_second": 59.903,
      "eval_steps_per_second": 29.984,
      "step": 50
    },
    {
      "epoch": 0.09272727272727273,
      "grad_norm": 134.6455535888672,
      "learning_rate": 9.285836503510562e-05,
      "loss": 3.991,
      "step": 51
    },
    {
      "epoch": 0.09454545454545454,
      "grad_norm": 106.82148742675781,
      "learning_rate": 9.24024048078213e-05,
      "loss": 3.4738,
      "step": 52
    },
    {
      "epoch": 0.09636363636363636,
      "grad_norm": 75.30558013916016,
      "learning_rate": 9.193352839727121e-05,
      "loss": 3.722,
      "step": 53
    },
    {
      "epoch": 0.09818181818181818,
      "grad_norm": 73.11234283447266,
      "learning_rate": 9.145187862775209e-05,
      "loss": 2.7794,
      "step": 54
    },
    {
      "epoch": 0.1,
      "grad_norm": 47.96721267700195,
      "learning_rate": 9.09576022144496e-05,
      "loss": 3.1889,
      "step": 55
    },
    {
      "epoch": 0.10181818181818182,
      "grad_norm": 55.4221076965332,
      "learning_rate": 9.045084971874738e-05,
      "loss": 3.0504,
      "step": 56
    },
    {
      "epoch": 0.10363636363636364,
      "grad_norm": 85.66980743408203,
      "learning_rate": 8.993177550236464e-05,
      "loss": 3.4424,
      "step": 57
    },
    {
      "epoch": 0.10545454545454545,
      "grad_norm": 73.78186798095703,
      "learning_rate": 8.940053768033609e-05,
      "loss": 2.7928,
      "step": 58
    },
    {
      "epoch": 0.10727272727272727,
      "grad_norm": 60.2110481262207,
      "learning_rate": 8.885729807284856e-05,
      "loss": 3.0366,
      "step": 59
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 95.8529052734375,
      "learning_rate": 8.83022221559489e-05,
      "loss": 3.5818,
      "step": 60
    },
    {
      "epoch": 0.11090909090909092,
      "grad_norm": 89.5805892944336,
      "learning_rate": 8.773547901113862e-05,
      "loss": 2.9999,
      "step": 61
    },
    {
      "epoch": 0.11272727272727273,
      "grad_norm": 45.49700164794922,
      "learning_rate": 8.715724127386972e-05,
      "loss": 3.0906,
      "step": 62
    },
    {
      "epoch": 0.11454545454545455,
      "grad_norm": 66.79446411132812,
      "learning_rate": 8.656768508095853e-05,
      "loss": 2.9276,
      "step": 63
    },
    {
      "epoch": 0.11636363636363636,
      "grad_norm": 47.410247802734375,
      "learning_rate": 8.596699001693255e-05,
      "loss": 2.6777,
      "step": 64
    },
    {
      "epoch": 0.11818181818181818,
      "grad_norm": 46.62559509277344,
      "learning_rate": 8.535533905932738e-05,
      "loss": 2.6861,
      "step": 65
    },
    {
      "epoch": 0.12,
      "grad_norm": 51.441925048828125,
      "learning_rate": 8.473291852294987e-05,
      "loss": 3.3482,
      "step": 66
    },
    {
      "epoch": 0.12181818181818181,
      "grad_norm": 96.47298431396484,
      "learning_rate": 8.409991800312493e-05,
      "loss": 2.9825,
      "step": 67
    },
    {
      "epoch": 0.12363636363636364,
      "grad_norm": 34.14326477050781,
      "learning_rate": 8.345653031794292e-05,
      "loss": 2.9371,
      "step": 68
    },
    {
      "epoch": 0.12545454545454546,
      "grad_norm": 57.666561126708984,
      "learning_rate": 8.280295144952536e-05,
      "loss": 4.0862,
      "step": 69
    },
    {
      "epoch": 0.12727272727272726,
      "grad_norm": 40.737274169921875,
      "learning_rate": 8.213938048432697e-05,
      "loss": 3.4629,
      "step": 70
    },
    {
      "epoch": 0.1290909090909091,
      "grad_norm": 30.08909034729004,
      "learning_rate": 8.146601955249188e-05,
      "loss": 3.2942,
      "step": 71
    },
    {
      "epoch": 0.13090909090909092,
      "grad_norm": 37.007659912109375,
      "learning_rate": 8.07830737662829e-05,
      "loss": 3.4122,
      "step": 72
    },
    {
      "epoch": 0.13272727272727272,
      "grad_norm": 40.06611251831055,
      "learning_rate": 8.009075115760243e-05,
      "loss": 3.4205,
      "step": 73
    },
    {
      "epoch": 0.13454545454545455,
      "grad_norm": 32.19481658935547,
      "learning_rate": 7.938926261462366e-05,
      "loss": 3.5059,
      "step": 74
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 37.26934814453125,
      "learning_rate": 7.86788218175523e-05,
      "loss": 3.2033,
      "step": 75
    },
    {
      "epoch": 0.13818181818181818,
      "grad_norm": 34.74386978149414,
      "learning_rate": 7.795964517353735e-05,
      "loss": 3.1631,
      "step": 76
    },
    {
      "epoch": 0.14,
      "grad_norm": 30.365886688232422,
      "learning_rate": 7.723195175075136e-05,
      "loss": 2.9813,
      "step": 77
    },
    {
      "epoch": 0.14181818181818182,
      "grad_norm": 38.53085708618164,
      "learning_rate": 7.649596321166024e-05,
      "loss": 3.1473,
      "step": 78
    },
    {
      "epoch": 0.14363636363636365,
      "grad_norm": 58.952613830566406,
      "learning_rate": 7.575190374550272e-05,
      "loss": 3.3567,
      "step": 79
    },
    {
      "epoch": 0.14545454545454545,
      "grad_norm": 35.19995880126953,
      "learning_rate": 7.500000000000001e-05,
      "loss": 3.1655,
      "step": 80
    },
    {
      "epoch": 0.14727272727272728,
      "grad_norm": 42.377830505371094,
      "learning_rate": 7.424048101231686e-05,
      "loss": 3.6093,
      "step": 81
    },
    {
      "epoch": 0.14909090909090908,
      "grad_norm": 36.57028579711914,
      "learning_rate": 7.347357813929454e-05,
      "loss": 3.458,
      "step": 82
    },
    {
      "epoch": 0.1509090909090909,
      "grad_norm": 51.59428405761719,
      "learning_rate": 7.269952498697734e-05,
      "loss": 3.4229,
      "step": 83
    },
    {
      "epoch": 0.15272727272727274,
      "grad_norm": 39.17350769042969,
      "learning_rate": 7.191855733945387e-05,
      "loss": 3.0197,
      "step": 84
    },
    {
      "epoch": 0.15454545454545454,
      "grad_norm": 41.6758918762207,
      "learning_rate": 7.113091308703498e-05,
      "loss": 3.0252,
      "step": 85
    },
    {
      "epoch": 0.15636363636363637,
      "grad_norm": 46.32060241699219,
      "learning_rate": 7.033683215379002e-05,
      "loss": 2.81,
      "step": 86
    },
    {
      "epoch": 0.15818181818181817,
      "grad_norm": 63.42094421386719,
      "learning_rate": 6.953655642446368e-05,
      "loss": 3.1065,
      "step": 87
    },
    {
      "epoch": 0.16,
      "grad_norm": 40.31554412841797,
      "learning_rate": 6.873032967079561e-05,
      "loss": 3.1684,
      "step": 88
    },
    {
      "epoch": 0.1618181818181818,
      "grad_norm": 47.06768035888672,
      "learning_rate": 6.7918397477265e-05,
      "loss": 3.2292,
      "step": 89
    },
    {
      "epoch": 0.16363636363636364,
      "grad_norm": 44.603546142578125,
      "learning_rate": 6.710100716628344e-05,
      "loss": 3.0992,
      "step": 90
    },
    {
      "epoch": 0.16545454545454547,
      "grad_norm": 40.659542083740234,
      "learning_rate": 6.627840772285784e-05,
      "loss": 3.1235,
      "step": 91
    },
    {
      "epoch": 0.16727272727272727,
      "grad_norm": 51.51438522338867,
      "learning_rate": 6.545084971874738e-05,
      "loss": 3.1261,
      "step": 92
    },
    {
      "epoch": 0.1690909090909091,
      "grad_norm": 35.61064910888672,
      "learning_rate": 6.461858523613684e-05,
      "loss": 3.3095,
      "step": 93
    },
    {
      "epoch": 0.1709090909090909,
      "grad_norm": 33.06657409667969,
      "learning_rate": 6.378186779084995e-05,
      "loss": 3.1334,
      "step": 94
    },
    {
      "epoch": 0.17272727272727273,
      "grad_norm": 42.53789520263672,
      "learning_rate": 6.294095225512603e-05,
      "loss": 3.1923,
      "step": 95
    },
    {
      "epoch": 0.17454545454545456,
      "grad_norm": 63.94884490966797,
      "learning_rate": 6.209609477998338e-05,
      "loss": 3.2337,
      "step": 96
    },
    {
      "epoch": 0.17636363636363636,
      "grad_norm": 44.34423828125,
      "learning_rate": 6.124755271719325e-05,
      "loss": 3.6265,
      "step": 97
    },
    {
      "epoch": 0.1781818181818182,
      "grad_norm": 42.41252899169922,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 4.2179,
      "step": 98
    },
    {
      "epoch": 0.18,
      "grad_norm": 39.532737731933594,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 3.535,
      "step": 99
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 41.67788314819336,
      "learning_rate": 5.868240888334653e-05,
      "loss": 4.1957,
      "step": 100
    },
    {
      "epoch": 0.18181818181818182,
      "eval_loss": 0.7785072326660156,
      "eval_runtime": 15.6483,
      "eval_samples_per_second": 59.24,
      "eval_steps_per_second": 29.652,
      "step": 100
    },
    {
      "epoch": 0.18363636363636363,
      "grad_norm": 83.87976837158203,
      "learning_rate": 5.782172325201155e-05,
      "loss": 2.4931,
      "step": 101
    },
    {
      "epoch": 0.18545454545454546,
      "grad_norm": 62.57492446899414,
      "learning_rate": 5.695865504800327e-05,
      "loss": 2.7229,
      "step": 102
    },
    {
      "epoch": 0.18727272727272729,
      "grad_norm": 44.38495635986328,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 2.6211,
      "step": 103
    },
    {
      "epoch": 0.1890909090909091,
      "grad_norm": 54.712833404541016,
      "learning_rate": 5.522642316338268e-05,
      "loss": 2.1997,
      "step": 104
    },
    {
      "epoch": 0.19090909090909092,
      "grad_norm": 49.44144058227539,
      "learning_rate": 5.435778713738292e-05,
      "loss": 2.1469,
      "step": 105
    },
    {
      "epoch": 0.19272727272727272,
      "grad_norm": 44.76526641845703,
      "learning_rate": 5.348782368720626e-05,
      "loss": 2.7389,
      "step": 106
    },
    {
      "epoch": 0.19454545454545455,
      "grad_norm": 47.95205307006836,
      "learning_rate": 5.26167978121472e-05,
      "loss": 2.6509,
      "step": 107
    },
    {
      "epoch": 0.19636363636363635,
      "grad_norm": 43.48308181762695,
      "learning_rate": 5.174497483512506e-05,
      "loss": 2.8425,
      "step": 108
    },
    {
      "epoch": 0.19818181818181818,
      "grad_norm": 45.31011962890625,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 2.9956,
      "step": 109
    },
    {
      "epoch": 0.2,
      "grad_norm": 47.43437957763672,
      "learning_rate": 5e-05,
      "loss": 2.7493,
      "step": 110
    },
    {
      "epoch": 0.2018181818181818,
      "grad_norm": 40.40293502807617,
      "learning_rate": 4.912737967813583e-05,
      "loss": 2.8543,
      "step": 111
    },
    {
      "epoch": 0.20363636363636364,
      "grad_norm": 49.42204666137695,
      "learning_rate": 4.825502516487497e-05,
      "loss": 3.1348,
      "step": 112
    },
    {
      "epoch": 0.20545454545454545,
      "grad_norm": 42.550025939941406,
      "learning_rate": 4.738320218785281e-05,
      "loss": 2.4712,
      "step": 113
    },
    {
      "epoch": 0.20727272727272728,
      "grad_norm": 39.86064147949219,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 2.49,
      "step": 114
    },
    {
      "epoch": 0.20909090909090908,
      "grad_norm": 39.12519454956055,
      "learning_rate": 4.564221286261709e-05,
      "loss": 2.7172,
      "step": 115
    },
    {
      "epoch": 0.2109090909090909,
      "grad_norm": 33.93404769897461,
      "learning_rate": 4.477357683661734e-05,
      "loss": 2.3934,
      "step": 116
    },
    {
      "epoch": 0.21272727272727274,
      "grad_norm": 42.115943908691406,
      "learning_rate": 4.390653282974264e-05,
      "loss": 2.597,
      "step": 117
    },
    {
      "epoch": 0.21454545454545454,
      "grad_norm": 61.48637771606445,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 2.6182,
      "step": 118
    },
    {
      "epoch": 0.21636363636363637,
      "grad_norm": 37.781768798828125,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 2.5236,
      "step": 119
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 40.604156494140625,
      "learning_rate": 4.131759111665349e-05,
      "loss": 3.1361,
      "step": 120
    },
    {
      "epoch": 0.22,
      "grad_norm": 43.32360076904297,
      "learning_rate": 4.045955023117276e-05,
      "loss": 3.1341,
      "step": 121
    },
    {
      "epoch": 0.22181818181818183,
      "grad_norm": 40.844425201416016,
      "learning_rate": 3.960441545911204e-05,
      "loss": 2.4682,
      "step": 122
    },
    {
      "epoch": 0.22363636363636363,
      "grad_norm": 80.41752624511719,
      "learning_rate": 3.875244728280676e-05,
      "loss": 2.5737,
      "step": 123
    },
    {
      "epoch": 0.22545454545454546,
      "grad_norm": 48.227821350097656,
      "learning_rate": 3.790390522001662e-05,
      "loss": 3.3177,
      "step": 124
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 45.7944221496582,
      "learning_rate": 3.705904774487396e-05,
      "loss": 2.9183,
      "step": 125
    },
    {
      "epoch": 0.2290909090909091,
      "grad_norm": 51.166900634765625,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 2.6203,
      "step": 126
    },
    {
      "epoch": 0.2309090909090909,
      "grad_norm": 46.844322204589844,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 2.8397,
      "step": 127
    },
    {
      "epoch": 0.23272727272727273,
      "grad_norm": 61.02446365356445,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 2.8248,
      "step": 128
    },
    {
      "epoch": 0.23454545454545456,
      "grad_norm": 65.25340270996094,
      "learning_rate": 3.372159227714218e-05,
      "loss": 3.2427,
      "step": 129
    },
    {
      "epoch": 0.23636363636363636,
      "grad_norm": 60.50672912597656,
      "learning_rate": 3.289899283371657e-05,
      "loss": 3.0476,
      "step": 130
    },
    {
      "epoch": 0.2381818181818182,
      "grad_norm": 58.63713836669922,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 3.0828,
      "step": 131
    },
    {
      "epoch": 0.24,
      "grad_norm": 69.12300872802734,
      "learning_rate": 3.12696703292044e-05,
      "loss": 3.5299,
      "step": 132
    },
    {
      "epoch": 0.24181818181818182,
      "grad_norm": 43.3862419128418,
      "learning_rate": 3.046344357553632e-05,
      "loss": 2.9089,
      "step": 133
    },
    {
      "epoch": 0.24363636363636362,
      "grad_norm": 55.379859924316406,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 2.5253,
      "step": 134
    },
    {
      "epoch": 0.24545454545454545,
      "grad_norm": 46.44770812988281,
      "learning_rate": 2.886908691296504e-05,
      "loss": 2.6508,
      "step": 135
    },
    {
      "epoch": 0.24727272727272728,
      "grad_norm": 94.80418395996094,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 3.1445,
      "step": 136
    },
    {
      "epoch": 0.24909090909090909,
      "grad_norm": 45.59507369995117,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 2.6856,
      "step": 137
    },
    {
      "epoch": 0.2509090909090909,
      "grad_norm": 41.788909912109375,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 3.0028,
      "step": 138
    },
    {
      "epoch": 0.25272727272727274,
      "grad_norm": 48.725833892822266,
      "learning_rate": 2.575951898768315e-05,
      "loss": 3.396,
      "step": 139
    },
    {
      "epoch": 0.2545454545454545,
      "grad_norm": 41.74631881713867,
      "learning_rate": 2.500000000000001e-05,
      "loss": 3.4218,
      "step": 140
    },
    {
      "epoch": 0.25636363636363635,
      "grad_norm": 39.308189392089844,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 2.4569,
      "step": 141
    },
    {
      "epoch": 0.2581818181818182,
      "grad_norm": 48.53035354614258,
      "learning_rate": 2.350403678833976e-05,
      "loss": 3.0169,
      "step": 142
    },
    {
      "epoch": 0.26,
      "grad_norm": 58.768245697021484,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 3.017,
      "step": 143
    },
    {
      "epoch": 0.26181818181818184,
      "grad_norm": 42.100189208984375,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 2.8048,
      "step": 144
    },
    {
      "epoch": 0.2636363636363636,
      "grad_norm": 48.90083312988281,
      "learning_rate": 2.132117818244771e-05,
      "loss": 2.9452,
      "step": 145
    },
    {
      "epoch": 0.26545454545454544,
      "grad_norm": 57.28050231933594,
      "learning_rate": 2.061073738537635e-05,
      "loss": 3.3012,
      "step": 146
    },
    {
      "epoch": 0.2672727272727273,
      "grad_norm": 41.389217376708984,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 3.5768,
      "step": 147
    },
    {
      "epoch": 0.2690909090909091,
      "grad_norm": 40.396934509277344,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 3.341,
      "step": 148
    },
    {
      "epoch": 0.27090909090909093,
      "grad_norm": 42.712406158447266,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 3.2837,
      "step": 149
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 51.320068359375,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 3.9642,
      "step": 150
    },
    {
      "epoch": 0.2727272727272727,
      "eval_loss": 0.6896986961364746,
      "eval_runtime": 15.5088,
      "eval_samples_per_second": 59.772,
      "eval_steps_per_second": 29.918,
      "step": 150
    },
    {
      "epoch": 0.27454545454545454,
      "grad_norm": 123.20899963378906,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 1.9542,
      "step": 151
    },
    {
      "epoch": 0.27636363636363637,
      "grad_norm": 71.69274139404297,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 2.2125,
      "step": 152
    },
    {
      "epoch": 0.2781818181818182,
      "grad_norm": 104.9278564453125,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 1.8621,
      "step": 153
    },
    {
      "epoch": 0.28,
      "grad_norm": 59.245018005371094,
      "learning_rate": 1.526708147705013e-05,
      "loss": 2.1506,
      "step": 154
    },
    {
      "epoch": 0.2818181818181818,
      "grad_norm": 123.07303619384766,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 1.8345,
      "step": 155
    },
    {
      "epoch": 0.28363636363636363,
      "grad_norm": 79.02330017089844,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 2.8691,
      "step": 156
    },
    {
      "epoch": 0.28545454545454546,
      "grad_norm": 80.06781005859375,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 1.9592,
      "step": 157
    },
    {
      "epoch": 0.2872727272727273,
      "grad_norm": 73.02725219726562,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 2.6779,
      "step": 158
    },
    {
      "epoch": 0.28909090909090907,
      "grad_norm": 50.68994140625,
      "learning_rate": 1.22645209888614e-05,
      "loss": 2.3196,
      "step": 159
    },
    {
      "epoch": 0.2909090909090909,
      "grad_norm": 72.57898712158203,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 2.3827,
      "step": 160
    },
    {
      "epoch": 0.2927272727272727,
      "grad_norm": 82.22683715820312,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 2.4989,
      "step": 161
    },
    {
      "epoch": 0.29454545454545455,
      "grad_norm": 59.47288131713867,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 2.3806,
      "step": 162
    },
    {
      "epoch": 0.2963636363636364,
      "grad_norm": 55.69282531738281,
      "learning_rate": 1.006822449763537e-05,
      "loss": 2.0908,
      "step": 163
    },
    {
      "epoch": 0.29818181818181816,
      "grad_norm": 49.40199661254883,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.0631,
      "step": 164
    },
    {
      "epoch": 0.3,
      "grad_norm": 81.0521240234375,
      "learning_rate": 9.042397785550405e-06,
      "loss": 2.9297,
      "step": 165
    },
    {
      "epoch": 0.3018181818181818,
      "grad_norm": 47.986270904541016,
      "learning_rate": 8.548121372247918e-06,
      "loss": 2.662,
      "step": 166
    },
    {
      "epoch": 0.30363636363636365,
      "grad_norm": 67.48284912109375,
      "learning_rate": 8.066471602728803e-06,
      "loss": 2.609,
      "step": 167
    },
    {
      "epoch": 0.3054545454545455,
      "grad_norm": 58.087398529052734,
      "learning_rate": 7.597595192178702e-06,
      "loss": 2.5202,
      "step": 168
    },
    {
      "epoch": 0.30727272727272725,
      "grad_norm": 57.88316345214844,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 2.3587,
      "step": 169
    },
    {
      "epoch": 0.3090909090909091,
      "grad_norm": 100.48794555664062,
      "learning_rate": 6.698729810778065e-06,
      "loss": 2.3491,
      "step": 170
    },
    {
      "epoch": 0.3109090909090909,
      "grad_norm": 59.3438606262207,
      "learning_rate": 6.269014643030213e-06,
      "loss": 2.7131,
      "step": 171
    },
    {
      "epoch": 0.31272727272727274,
      "grad_norm": 53.041725158691406,
      "learning_rate": 5.852620357053651e-06,
      "loss": 3.0228,
      "step": 172
    },
    {
      "epoch": 0.3145454545454546,
      "grad_norm": 45.357826232910156,
      "learning_rate": 5.449673790581611e-06,
      "loss": 2.0914,
      "step": 173
    },
    {
      "epoch": 0.31636363636363635,
      "grad_norm": 41.874752044677734,
      "learning_rate": 5.060297685041659e-06,
      "loss": 1.9584,
      "step": 174
    },
    {
      "epoch": 0.3181818181818182,
      "grad_norm": 47.15371322631836,
      "learning_rate": 4.684610648167503e-06,
      "loss": 2.3451,
      "step": 175
    },
    {
      "epoch": 0.32,
      "grad_norm": 42.890743255615234,
      "learning_rate": 4.322727117869951e-06,
      "loss": 2.4312,
      "step": 176
    },
    {
      "epoch": 0.32181818181818184,
      "grad_norm": 115.22737121582031,
      "learning_rate": 3.974757327377981e-06,
      "loss": 2.4122,
      "step": 177
    },
    {
      "epoch": 0.3236363636363636,
      "grad_norm": 44.23305892944336,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 2.2942,
      "step": 178
    },
    {
      "epoch": 0.32545454545454544,
      "grad_norm": 41.19794464111328,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 3.0775,
      "step": 179
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 67.62647247314453,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 2.9131,
      "step": 180
    },
    {
      "epoch": 0.3290909090909091,
      "grad_norm": 42.33940124511719,
      "learning_rate": 2.724071220034158e-06,
      "loss": 2.1782,
      "step": 181
    },
    {
      "epoch": 0.33090909090909093,
      "grad_norm": 54.356407165527344,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.4858,
      "step": 182
    },
    {
      "epoch": 0.3327272727272727,
      "grad_norm": 42.20392990112305,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 2.5662,
      "step": 183
    },
    {
      "epoch": 0.33454545454545453,
      "grad_norm": 46.561893463134766,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 2.6082,
      "step": 184
    },
    {
      "epoch": 0.33636363636363636,
      "grad_norm": 53.607418060302734,
      "learning_rate": 1.70370868554659e-06,
      "loss": 2.8982,
      "step": 185
    },
    {
      "epoch": 0.3381818181818182,
      "grad_norm": 49.72503662109375,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 2.9618,
      "step": 186
    },
    {
      "epoch": 0.34,
      "grad_norm": 41.763607025146484,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 3.0604,
      "step": 187
    },
    {
      "epoch": 0.3418181818181818,
      "grad_norm": 48.85137939453125,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 3.3131,
      "step": 188
    },
    {
      "epoch": 0.34363636363636363,
      "grad_norm": 42.69316101074219,
      "learning_rate": 9.186408276168013e-07,
      "loss": 2.9401,
      "step": 189
    },
    {
      "epoch": 0.34545454545454546,
      "grad_norm": 57.626651763916016,
      "learning_rate": 7.596123493895991e-07,
      "loss": 2.1762,
      "step": 190
    },
    {
      "epoch": 0.3472727272727273,
      "grad_norm": 44.62453842163086,
      "learning_rate": 6.15582970243117e-07,
      "loss": 2.9992,
      "step": 191
    },
    {
      "epoch": 0.3490909090909091,
      "grad_norm": 48.011863708496094,
      "learning_rate": 4.865965629214819e-07,
      "loss": 2.984,
      "step": 192
    },
    {
      "epoch": 0.3509090909090909,
      "grad_norm": 50.390472412109375,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 2.8539,
      "step": 193
    },
    {
      "epoch": 0.3527272727272727,
      "grad_norm": 44.408843994140625,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 2.7273,
      "step": 194
    },
    {
      "epoch": 0.35454545454545455,
      "grad_norm": 49.21343994140625,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 3.5664,
      "step": 195
    },
    {
      "epoch": 0.3563636363636364,
      "grad_norm": 48.34445571899414,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 3.0188,
      "step": 196
    },
    {
      "epoch": 0.35818181818181816,
      "grad_norm": 33.7151985168457,
      "learning_rate": 6.852326227130834e-08,
      "loss": 3.038,
      "step": 197
    },
    {
      "epoch": 0.36,
      "grad_norm": 40.66362762451172,
      "learning_rate": 3.04586490452119e-08,
      "loss": 2.9837,
      "step": 198
    },
    {
      "epoch": 0.3618181818181818,
      "grad_norm": 37.93846893310547,
      "learning_rate": 7.615242180436522e-09,
      "loss": 3.1249,
      "step": 199
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 36.60719299316406,
      "learning_rate": 0.0,
      "loss": 3.1517,
      "step": 200
    },
    {
      "epoch": 0.36363636363636365,
      "eval_loss": 0.6734878420829773,
      "eval_runtime": 15.5531,
      "eval_samples_per_second": 59.602,
      "eval_steps_per_second": 29.833,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.49024359317504e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}