{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 22128,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 2.9322125813449026e-05,
      "loss": 2.223,
      "step": 500
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.8644251626898048e-05,
      "loss": 1.5712,
      "step": 1000
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.7966377440347073e-05,
      "loss": 1.4365,
      "step": 1500
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.7288503253796095e-05,
      "loss": 1.3255,
      "step": 2000
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.661062906724512e-05,
      "loss": 1.2879,
      "step": 2500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.5932754880694143e-05,
      "loss": 1.2262,
      "step": 3000
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.5254880694143168e-05,
      "loss": 1.2299,
      "step": 3500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.457700650759219e-05,
      "loss": 1.1777,
      "step": 4000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.3899132321041215e-05,
      "loss": 1.1717,
      "step": 4500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.322125813449024e-05,
      "loss": 1.1411,
      "step": 5000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.2543383947939266e-05,
      "loss": 1.1099,
      "step": 5500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.1865509761388288e-05,
      "loss": 1.1054,
      "step": 6000
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1187635574837313e-05,
      "loss": 1.0961,
      "step": 6500
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.0509761388286335e-05,
      "loss": 1.0753,
      "step": 7000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.983188720173536e-05,
      "loss": 1.0311,
      "step": 7500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.9154013015184382e-05,
      "loss": 1.0377,
      "step": 8000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.8476138828633408e-05,
      "loss": 1.0191,
      "step": 8500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.779826464208243e-05,
      "loss": 0.9882,
      "step": 9000
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.7120390455531455e-05,
      "loss": 0.9928,
      "step": 9500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6442516268980477e-05,
      "loss": 0.9701,
      "step": 10000
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5764642082429502e-05,
      "loss": 0.9901,
      "step": 10500
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5086767895878524e-05,
      "loss": 0.9647,
      "step": 11000
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4408893709327548e-05,
      "loss": 0.784,
      "step": 11500
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3731019522776571e-05,
      "loss": 0.7568,
      "step": 12000
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.3053145336225597e-05,
      "loss": 0.7582,
      "step": 12500
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.237527114967462e-05,
      "loss": 0.7732,
      "step": 13000
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.1697396963123646e-05,
      "loss": 0.7331,
      "step": 13500
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.101952277657267e-05,
      "loss": 0.7623,
      "step": 14000
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.0341648590021693e-05,
      "loss": 0.7403,
      "step": 14500
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.663774403470717e-06,
      "loss": 0.7096,
      "step": 15000
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.98590021691974e-06,
      "loss": 0.7049,
      "step": 15500
    },
    {
      "epoch": 1.45,
      "learning_rate": 8.308026030368764e-06,
      "loss": 0.7097,
      "step": 16000
    },
    {
      "epoch": 1.49,
      "learning_rate": 7.630151843817788e-06,
      "loss": 0.7169,
      "step": 16500
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.952277657266811e-06,
      "loss": 0.7172,
      "step": 17000
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.274403470715835e-06,
      "loss": 0.7446,
      "step": 17500
    },
    {
      "epoch": 1.63,
      "learning_rate": 5.5965292841648585e-06,
      "loss": 0.6982,
      "step": 18000
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.918655097613883e-06,
      "loss": 0.6878,
      "step": 18500
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.240780911062907e-06,
      "loss": 0.6856,
      "step": 19000
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.5629067245119307e-06,
      "loss": 0.6951,
      "step": 19500
    },
    {
      "epoch": 1.81,
      "learning_rate": 2.8850325379609543e-06,
      "loss": 0.6837,
      "step": 20000
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.2071583514099784e-06,
      "loss": 0.6826,
      "step": 20500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.529284164859002e-06,
      "loss": 0.6854,
      "step": 21000
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.514099783080261e-07,
      "loss": 0.6944,
      "step": 21500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.735357917570499e-07,
      "loss": 0.6932,
      "step": 22000
    },
    {
      "epoch": 2.0,
      "step": 22128,
      "total_flos": 6.662030063085158e+16,
      "train_loss": 0.9526093118189042,
      "train_runtime": 484536.8861,
      "train_samples_per_second": 0.548,
      "train_steps_per_second": 0.046
    }
  ],
  "max_steps": 22128,
  "num_train_epochs": 2,
  "total_flos": 6.662030063085158e+16,
  "trial_name": null,
  "trial_params": null
}