{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.32362459546925565,
  "eval_steps": 100,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-05,
      "loss": 1.9981,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9999675930251536e-05,
      "loss": 2.0613,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99987037294078e-05,
      "loss": 1.8228,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99970834226737e-05,
      "loss": 1.707,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999481505205661e-05,
      "loss": 1.5271,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999189867636535e-05,
      "loss": 1.4562,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998833437120866e-05,
      "loss": 1.3805,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998412222899321e-05,
      "loss": 1.2998,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.997926235892124e-05,
      "loss": 1.4383,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.997375488698769e-05,
      "loss": 1.2441,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.996759995597697e-05,
      "loss": 1.2275,
      "step": 11
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.996079772545923e-05,
      "loss": 1.2233,
      "step": 12
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.995334837178625e-05,
      "loss": 1.1886,
      "step": 13
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9945252088086825e-05,
      "loss": 1.1779,
      "step": 14
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.993650908426182e-05,
      "loss": 1.1122,
      "step": 15
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.992711958697868e-05,
      "loss": 1.0766,
      "step": 16
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.991708383966556e-05,
      "loss": 1.0836,
      "step": 17
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9906402102505026e-05,
      "loss": 0.9728,
      "step": 18
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.989507465242732e-05,
      "loss": 0.9883,
      "step": 19
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.988310178310315e-05,
      "loss": 1.0307,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9870483804936084e-05,
      "loss": 0.9613,
      "step": 21
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9857221045054535e-05,
      "loss": 1.1038,
      "step": 22
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9843313847303246e-05,
      "loss": 0.9206,
      "step": 23
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9828762572234374e-05,
      "loss": 0.9893,
      "step": 24
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9813567597098166e-05,
      "loss": 0.8328,
      "step": 25
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.979772931583317e-05,
      "loss": 0.8712,
      "step": 26
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.978124813905599e-05,
      "loss": 0.8031,
      "step": 27
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.976412449405072e-05,
      "loss": 0.7675,
      "step": 28
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.974635882475778e-05,
      "loss": 0.7851,
      "step": 29
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.972795159176243e-05,
      "loss": 0.7447,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9708903272282884e-05,
      "loss": 0.7719,
      "step": 31
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9689214360157844e-05,
      "loss": 0.7142,
      "step": 32
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9668885365833795e-05,
      "loss": 0.8649,
      "step": 33
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.96479168163517e-05,
      "loss": 0.7574,
      "step": 34
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9626309255333346e-05,
      "loss": 0.9205,
      "step": 35
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9604063242967315e-05,
      "loss": 0.7987,
      "step": 36
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.958117935599434e-05,
      "loss": 0.7626,
      "step": 37
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.955765818769249e-05,
      "loss": 0.8332,
      "step": 38
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9533500347861675e-05,
      "loss": 0.9178,
      "step": 39
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.950870646280791e-05,
      "loss": 0.9919,
      "step": 40
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.948327717532705e-05,
      "loss": 0.7098,
      "step": 41
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9457213144688095e-05,
      "loss": 0.7552,
      "step": 42
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9430515046616175e-05,
      "loss": 0.7496,
      "step": 43
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.940318357327495e-05,
      "loss": 0.7667,
      "step": 44
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.937521943324873e-05,
      "loss": 0.8685,
      "step": 45
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.934662335152405e-05,
      "loss": 0.6715,
      "step": 46
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.931739606947091e-05,
      "loss": 0.8149,
      "step": 47
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9287538344823544e-05,
      "loss": 0.7346,
      "step": 48
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.925705095166079e-05,
      "loss": 0.7803,
      "step": 49
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.922593468038599e-05,
      "loss": 0.7451,
      "step": 50
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.919419033770652e-05,
      "loss": 0.8402,
      "step": 51
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.916181874661286e-05,
      "loss": 0.7988,
      "step": 52
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.91288207463573e-05,
      "loss": 0.7444,
      "step": 53
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9095197192432105e-05,
      "loss": 0.8545,
      "step": 54
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.906094895654744e-05,
      "loss": 0.761,
      "step": 55
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.902607692660865e-05,
      "loss": 0.6741,
      "step": 56
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8990582006693365e-05,
      "loss": 0.8457,
      "step": 57
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.895446511702793e-05,
      "loss": 0.8096,
      "step": 58
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.891772719396369e-05,
      "loss": 0.7989,
      "step": 59
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.888036918995258e-05,
      "loss": 0.7683,
      "step": 60
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.884239207352252e-05,
      "loss": 0.7912,
      "step": 61
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.880379682925228e-05,
      "loss": 0.7417,
      "step": 62
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.876458445774594e-05,
      "loss": 0.7511,
      "step": 63
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.872475597560699e-05,
      "loss": 0.8021,
      "step": 64
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8684312415411897e-05,
      "loss": 0.8154,
      "step": 65
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.864325482568344e-05,
      "loss": 0.7109,
      "step": 66
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.860158427086341e-05,
      "loss": 0.7915,
      "step": 67
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.855930183128513e-05,
      "loss": 0.6363,
      "step": 68
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.851640860314536e-05,
      "loss": 0.6987,
      "step": 69
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8472905698475906e-05,
      "loss": 0.6498,
      "step": 70
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.84287942451148e-05,
      "loss": 0.7768,
      "step": 71
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8384075386677054e-05,
      "loss": 0.7979,
      "step": 72
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.833875028252499e-05,
      "loss": 0.7611,
      "step": 73
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8292820107738235e-05,
      "loss": 0.7889,
      "step": 74
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.824628605308319e-05,
      "loss": 0.6706,
      "step": 75
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.819914932498222e-05,
      "loss": 0.7762,
      "step": 76
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.815141114548232e-05,
      "loss": 0.7517,
      "step": 77
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8103072752223486e-05,
      "loss": 0.7793,
      "step": 78
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.805413539840659e-05,
      "loss": 0.7306,
      "step": 79
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.80046003527609e-05,
      "loss": 0.834,
      "step": 80
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7954468899511215e-05,
      "loss": 0.8076,
      "step": 81
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.790374233834452e-05,
      "loss": 0.8375,
      "step": 82
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7852421984376324e-05,
      "loss": 0.7839,
      "step": 83
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.780050916811658e-05,
      "loss": 0.6221,
      "step": 84
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7748005235435137e-05,
      "loss": 0.7212,
      "step": 85
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.76949115475269e-05,
      "loss": 0.6369,
      "step": 86
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7641229480876515e-05,
      "loss": 0.7167,
      "step": 87
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.758696042722269e-05,
      "loss": 0.6908,
      "step": 88
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.753210579352211e-05,
      "loss": 0.6681,
      "step": 89
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.747666700191297e-05,
      "loss": 0.6566,
      "step": 90
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7420645489678076e-05,
      "loss": 0.6869,
      "step": 91
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7364042709207626e-05,
      "loss": 0.7106,
      "step": 92
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.730686012796153e-05,
      "loss": 0.6782,
      "step": 93
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.724909922843136e-05,
      "loss": 0.7148,
      "step": 94
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.719076150810193e-05,
      "loss": 0.8887,
      "step": 95
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7131848479412476e-05,
      "loss": 0.7408,
      "step": 96
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.707236166971742e-05,
      "loss": 0.7046,
      "step": 97
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7012302621246804e-05,
      "loss": 0.7657,
      "step": 98
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.695167289106629e-05,
      "loss": 0.8138,
      "step": 99
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.689047405103678e-05,
      "loss": 0.6964,
      "step": 100
    },
    {
      "epoch": 0.16,
      "eval_loss": 0.796372652053833,
      "eval_runtime": 5.3558,
      "eval_samples_per_second": 1.867,
      "eval_steps_per_second": 0.373,
      "step": 100
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.68287076877737e-05,
      "loss": 0.6541,
      "step": 101
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.6766375402605824e-05,
      "loss": 0.714,
      "step": 102
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.6703478811533794e-05,
      "loss": 0.6823,
      "step": 103
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.6640019545188216e-05,
      "loss": 0.8283,
      "step": 104
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.657599924878736e-05,
      "loss": 0.7126,
      "step": 105
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.651141958209453e-05,
      "loss": 0.6612,
      "step": 106
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.644628221937504e-05,
      "loss": 0.7339,
      "step": 107
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.638058884935279e-05,
      "loss": 0.6852,
      "step": 108
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6314341175166485e-05,
      "loss": 0.6804,
      "step": 109
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6247540914325504e-05,
      "loss": 0.7904,
      "step": 110
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.618018979866534e-05,
      "loss": 0.6545,
      "step": 111
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.611228957430272e-05,
      "loss": 0.7545,
      "step": 112
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6043842001590344e-05,
      "loss": 0.7478,
      "step": 113
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.5974848855071206e-05,
      "loss": 0.8083,
      "step": 114
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.590531192343266e-05,
      "loss": 0.7435,
      "step": 115
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.5835233009459964e-05,
      "loss": 0.7647,
      "step": 116
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.576461392998961e-05,
      "loss": 0.7631,
      "step": 117
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.569345651586218e-05,
      "loss": 0.7674,
      "step": 118
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.5621762611874904e-05,
      "loss": 0.6737,
      "step": 119
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.55495340767338e-05,
      "loss": 0.6901,
      "step": 120
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.547677278300555e-05,
      "loss": 0.709,
      "step": 121
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.540348061706886e-05,
      "loss": 0.5995,
      "step": 122
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.5329659479065655e-05,
      "loss": 0.6832,
      "step": 123
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.525531128285173e-05,
      "loss": 0.7584,
      "step": 124
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.5180437955947195e-05,
      "loss": 0.6998,
      "step": 125
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.510504143948646e-05,
      "loss": 0.6477,
      "step": 126
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.502912368816794e-05,
      "loss": 0.681,
      "step": 127
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.4952686670203357e-05,
      "loss": 0.7638,
      "step": 128
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.487573236726673e-05,
      "loss": 0.7086,
      "step": 129
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.4798262774442986e-05,
      "loss": 0.6166,
      "step": 130
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.472027990017623e-05,
      "loss": 0.5901,
      "step": 131
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.464178576621771e-05,
      "loss": 0.8138,
      "step": 132
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.456278240757338e-05,
      "loss": 0.6773,
      "step": 133
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.4483271872451094e-05,
      "loss": 0.6576,
      "step": 134
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.44032562222076e-05,
      "loss": 0.7106,
      "step": 135
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.432273753129502e-05,
      "loss": 0.6479,
      "step": 136
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.4241717887207124e-05,
      "loss": 0.7102,
      "step": 137
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.416019939042515e-05,
      "loss": 0.7188,
      "step": 138
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.40781841543634e-05,
      "loss": 0.6996,
      "step": 139
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.399567430531444e-05,
      "loss": 0.7523,
      "step": 140
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.391267198239394e-05,
      "loss": 0.6279,
      "step": 141
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.3829179337485254e-05,
      "loss": 0.6718,
      "step": 142
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.374519853518362e-05,
      "loss": 0.668,
      "step": 143
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.366073175274004e-05,
      "loss": 0.8341,
      "step": 144
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.357578118000482e-05,
      "loss": 0.7262,
      "step": 145
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.3490349019370824e-05,
      "loss": 0.7613,
      "step": 146
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.340443748571636e-05,
      "loss": 0.6722,
      "step": 147
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.331804880634775e-05,
      "loss": 0.7229,
      "step": 148
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.3231185220941605e-05,
      "loss": 0.7265,
      "step": 149
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.3143848981486746e-05,
      "loss": 0.7289,
      "step": 150
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.305604235222582e-05,
      "loss": 0.6441,
      "step": 151
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.2967767609596624e-05,
      "loss": 0.772,
      "step": 152
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.287902704217304e-05,
      "loss": 0.705,
      "step": 153
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.2789822950605725e-05,
      "loss": 0.7711,
      "step": 154
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.2700157647562486e-05,
      "loss": 0.7688,
      "step": 155
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.261003345766832e-05,
      "loss": 0.6978,
      "step": 156
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.251945271744509e-05,
      "loss": 0.7418,
      "step": 157
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.242841777525101e-05,
      "loss": 0.6258,
      "step": 158
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.233693099121976e-05,
      "loss": 0.7988,
      "step": 159
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.224499473719926e-05,
      "loss": 0.6947,
      "step": 160
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.21526113966902e-05,
      "loss": 0.6712,
      "step": 161
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.205978336478427e-05,
      "loss": 0.6871,
      "step": 162
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.196651304810202e-05,
      "loss": 0.6682,
      "step": 163
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.187280286473048e-05,
      "loss": 0.6139,
      "step": 164
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.177865524416052e-05,
      "loss": 0.6259,
      "step": 165
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.168407262722377e-05,
      "loss": 0.6894,
      "step": 166
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.1589057466029444e-05,
      "loss": 0.8107,
      "step": 167
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.149361222390068e-05,
      "loss": 0.728,
      "step": 168
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.1397739375310736e-05,
      "loss": 0.6987,
      "step": 169
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.1301441405818794e-05,
      "loss": 0.7855,
      "step": 170
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.120472081200556e-05,
      "loss": 0.6786,
      "step": 171
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.1107580101408524e-05,
      "loss": 0.805,
      "step": 172
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.101002179245693e-05,
      "loss": 0.7006,
      "step": 173
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.09120484144065e-05,
      "loss": 0.7023,
      "step": 174
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.0813662507273885e-05,
      "loss": 0.7802,
      "step": 175
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.0714866621770775e-05,
      "loss": 0.6794,
      "step": 176
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.06156633192378e-05,
      "loss": 0.6833,
      "step": 177
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.051605517157809e-05,
      "loss": 0.7591,
      "step": 178
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.041604476119064e-05,
      "loss": 0.7059,
      "step": 179
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.0315634680903336e-05,
      "loss": 0.713,
      "step": 180
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.021482753390573e-05,
      "loss": 0.686,
      "step": 181
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.011362593368156e-05,
      "loss": 0.718,
      "step": 182
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.001203250394101e-05,
      "loss": 0.8431,
      "step": 183
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.9910049878552646e-05,
      "loss": 0.7043,
      "step": 184
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.9807680701475174e-05,
      "loss": 0.7591,
      "step": 185
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.970492762668887e-05,
      "loss": 0.7677,
      "step": 186
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.9601793318126776e-05,
      "loss": 0.6897,
      "step": 187
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.9498280449605664e-05,
      "loss": 0.753,
      "step": 188
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.939439170475666e-05,
      "loss": 0.7049,
      "step": 189
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.929012977695572e-05,
      "loss": 0.6654,
      "step": 190
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.918549736925378e-05,
      "loss": 0.6969,
      "step": 191
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.9080497194306686e-05,
      "loss": 0.6655,
      "step": 192
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.897513197430486e-05,
      "loss": 0.7118,
      "step": 193
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.8869404440902735e-05,
      "loss": 0.5986,
      "step": 194
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.876331733514792e-05,
      "loss": 0.7191,
      "step": 195
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.865687340741014e-05,
      "loss": 0.6433,
      "step": 196
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.855007541730996e-05,
      "loss": 0.6386,
      "step": 197
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.844292613364719e-05,
      "loss": 0.8535,
      "step": 198
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.833542833432916e-05,
      "loss": 0.7112,
      "step": 199
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.822758480629864e-05,
      "loss": 0.7226,
      "step": 200
    },
    {
      "epoch": 0.32,
      "eval_loss": 0.7749701738357544,
      "eval_runtime": 5.4698,
      "eval_samples_per_second": 1.828,
      "eval_steps_per_second": 0.366,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 618,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 5.041194107142144e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}