{
  "best_metric": 0.7689279416122303,
| "best_model_checkpoint": "2-en-zh-**1-wikispan-*unsup-ensemble-last-64-768-6*-64-768-3e-5-8600/checkpoint-1800**-64-128-3e-5-2600/checkpoint-2501", | |
| "epoch": 1.0, | |
| "global_step": 2501, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.5e-05, | |
| "loss": 35.0851, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 3e-05, | |
| "loss": 22.8576, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 2.875e-05, | |
| "loss": 22.0178, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 2.75e-05, | |
| "loss": 18.8745, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 2.625e-05, | |
| "loss": 18.4235, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 2.5e-05, | |
| "loss": 17.877, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 2.3749999999999998e-05, | |
| "loss": 16.3567, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 2.25e-05, | |
| "loss": 15.9635, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 2.125e-05, | |
| "loss": 15.444, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.9999999999999998e-05, | |
| "loss": 15.3217, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.8750000000000002e-05, | |
| "loss": 14.6601, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.7500000000000002e-05, | |
| "loss": 13.8598, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.625e-05, | |
| "loss": 13.726, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.5e-05, | |
| "loss": 13.9364, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.375e-05, | |
| "loss": 13.0504, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.25e-05, | |
| "loss": 12.7187, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.125e-05, | |
| "loss": 12.3489, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.999999999999999e-06, | |
| "loss": 12.1597, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 8.750000000000001e-06, | |
| "loss": 11.9908, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 7.5e-06, | |
| "loss": 11.0219, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 6.25e-06, | |
| "loss": 12.0322, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 4.9999999999999996e-06, | |
| "loss": 11.1851, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 3.75e-06, | |
| "loss": 11.6679, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.4999999999999998e-06, | |
| "loss": 11.3133, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.2499999999999999e-06, | |
| "loss": 11.6247, | |
| "step": 2500 | |
| } | |
| ], | |
| "max_steps": 2600, | |
| "num_train_epochs": 2, | |
| "total_flos": 165708798492672.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |