{
  "best_metric": 0.7730463326325963,
| "best_model_checkpoint": "2-en-ru-**1-wikispan-*unsup-ensemble-last-64-768-6*-64-768-3e-5-8600/checkpoint-1800**-64-128-3e-5-2600/checkpoint-2501", | |
| "epoch": 1.0, | |
| "global_step": 2501, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.5e-05, | |
| "loss": 34.7488, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 3e-05, | |
| "loss": 22.2167, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 2.875e-05, | |
| "loss": 21.4138, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 2.75e-05, | |
| "loss": 18.6706, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 2.625e-05, | |
| "loss": 18.0538, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 2.5e-05, | |
| "loss": 17.5283, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 2.3749999999999998e-05, | |
| "loss": 15.965, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 2.25e-05, | |
| "loss": 15.592, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 2.125e-05, | |
| "loss": 15.0616, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.9999999999999998e-05, | |
| "loss": 15.159, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.8750000000000002e-05, | |
| "loss": 14.4158, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.7500000000000002e-05, | |
| "loss": 13.5464, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.625e-05, | |
| "loss": 13.3597, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.5e-05, | |
| "loss": 13.6918, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.375e-05, | |
| "loss": 12.71, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.25e-05, | |
| "loss": 12.3813, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.125e-05, | |
| "loss": 12.142, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.999999999999999e-06, | |
| "loss": 11.8619, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 8.750000000000001e-06, | |
| "loss": 11.7191, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 7.5e-06, | |
| "loss": 10.7367, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 6.25e-06, | |
| "loss": 11.5787, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 4.9999999999999996e-06, | |
| "loss": 11.0384, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.75e-06, | |
| "loss": 11.3574, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.4999999999999998e-06, | |
| "loss": 11.1003, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.2499999999999999e-06, | |
| "loss": 11.352, | |
| "step": 2500 | |
| } | |
| ], | |
| "max_steps": 2600, | |
| "num_train_epochs": 2, | |
| "total_flos": 165708798492672.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |