{
  "best_metric": 1.6087621450424194,
  "best_model_checkpoint": "./qlora-out/checkpoint-400",
  "epoch": 0.9034443817052513,
  "eval_steps": 100,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.6126,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.4954,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 6e-06,
      "loss": 2.4127,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.5176,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 1e-05,
      "loss": 2.2497,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.2e-05,
      "loss": 2.4175,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 2.6525,
      "step": 7
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 2.1653,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.8e-05,
      "loss": 2.1483,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 2e-05,
      "loss": 2.3218,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 2.2764,
      "step": 11
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.4e-05,
      "loss": 2.2225,
      "step": 12
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 2.1253,
      "step": 13
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 2.1824,
      "step": 14
    },
    {
      "epoch": 0.03,
      "learning_rate": 3e-05,
      "loss": 2.5585,
      "step": 15
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 2.1777,
      "step": 16
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 2.3706,
      "step": 17
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.6e-05,
      "loss": 1.7773,
      "step": 18
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.8e-05,
      "loss": 1.9601,
      "step": 19
    },
    {
      "epoch": 0.05,
      "learning_rate": 4e-05,
      "loss": 1.7374,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.2e-05,
      "loss": 2.0932,
      "step": 21
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.6992,
      "step": 22
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.7488,
      "step": 23
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.8e-05,
      "loss": 1.8932,
      "step": 24
    },
    {
      "epoch": 0.06,
      "learning_rate": 5e-05,
      "loss": 1.9512,
      "step": 25
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.8087,
      "step": 26
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.7846,
      "step": 27
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.8874,
      "step": 28
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.8e-05,
      "loss": 1.919,
      "step": 29
    },
    {
      "epoch": 0.07,
      "learning_rate": 6e-05,
      "loss": 1.7985,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.2e-05,
      "loss": 1.8528,
      "step": 31
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.8325,
      "step": 32
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.6e-05,
      "loss": 1.596,
      "step": 33
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.9767,
      "step": 34
    },
    {
      "epoch": 0.08,
      "learning_rate": 7e-05,
      "loss": 1.6124,
      "step": 35
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.2e-05,
      "loss": 1.6764,
      "step": 36
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.4e-05,
      "loss": 1.8457,
      "step": 37
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.6e-05,
      "loss": 1.6134,
      "step": 38
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.9267,
      "step": 39
    },
    {
      "epoch": 0.09,
      "learning_rate": 8e-05,
      "loss": 1.8999,
      "step": 40
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.2e-05,
      "loss": 1.6804,
      "step": 41
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.4e-05,
      "loss": 1.8109,
      "step": 42
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.6e-05,
      "loss": 1.7453,
      "step": 43
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.9238,
      "step": 44
    },
    {
      "epoch": 0.1,
      "learning_rate": 9e-05,
      "loss": 1.5639,
      "step": 45
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.7264,
      "step": 46
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.4e-05,
      "loss": 1.9841,
      "step": 47
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.6e-05,
      "loss": 1.5782,
      "step": 48
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.8e-05,
      "loss": 1.8506,
      "step": 49
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001,
      "loss": 1.5642,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.999964526246879e-05,
      "loss": 1.7945,
      "step": 51
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.999858105490867e-05,
      "loss": 1.7237,
      "step": 52
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.999680739242022e-05,
      "loss": 1.7998,
      "step": 53
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.999432430017084e-05,
      "loss": 1.5217,
      "step": 54
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.999113181339437e-05,
      "loss": 1.8277,
      "step": 55
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.99872299773906e-05,
      "loss": 1.8734,
      "step": 56
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.998261884752462e-05,
      "loss": 1.5751,
      "step": 57
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.99772984892261e-05,
      "loss": 1.773,
      "step": 58
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.997126897798825e-05,
      "loss": 1.392,
      "step": 59
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.996453039936682e-05,
      "loss": 1.7622,
      "step": 60
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.995708284897889e-05,
      "loss": 1.7739,
      "step": 61
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.994892643250147e-05,
      "loss": 1.9572,
      "step": 62
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.994006126567006e-05,
      "loss": 1.6673,
      "step": 63
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.993048747427696e-05,
      "loss": 1.6756,
      "step": 64
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.992020519416948e-05,
      "loss": 1.6236,
      "step": 65
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.990921457124805e-05,
      "loss": 1.6774,
      "step": 66
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.989751576146413e-05,
      "loss": 1.7076,
      "step": 67
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.988510893081799e-05,
      "loss": 1.8763,
      "step": 68
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.98719942553564e-05,
      "loss": 1.6324,
      "step": 69
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.985817192117001e-05,
      "loss": 1.7579,
      "step": 70
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.984364212439088e-05,
      "loss": 1.6983,
      "step": 71
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.982840507118958e-05,
      "loss": 1.6052,
      "step": 72
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.981246097777231e-05,
      "loss": 1.7244,
      "step": 73
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.979581007037776e-05,
      "loss": 1.7328,
      "step": 74
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.977845258527403e-05,
      "loss": 1.5821,
      "step": 75
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.976038876875518e-05,
      "loss": 1.6529,
      "step": 76
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.974161887713775e-05,
      "loss": 1.6353,
      "step": 77
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.972214317675713e-05,
      "loss": 1.5552,
      "step": 78
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.970196194396381e-05,
      "loss": 1.5826,
      "step": 79
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.968107546511942e-05,
      "loss": 1.9228,
      "step": 80
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.965948403659267e-05,
      "loss": 1.7107,
      "step": 81
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.963718796475515e-05,
      "loss": 1.7575,
      "step": 82
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.961418756597702e-05,
      "loss": 1.5771,
      "step": 83
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.959048316662246e-05,
      "loss": 1.8133,
      "step": 84
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.956607510304508e-05,
      "loss": 1.5487,
      "step": 85
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.95409637215831e-05,
      "loss": 1.6759,
      "step": 86
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.951514937855454e-05,
      "loss": 1.8012,
      "step": 87
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.948863244025203e-05,
      "loss": 1.5686,
      "step": 88
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.94614132829377e-05,
      "loss": 1.8649,
      "step": 89
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.943349229283781e-05,
      "loss": 1.7711,
      "step": 90
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.94048698661373e-05,
      "loss": 1.6153,
      "step": 91
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.937554640897413e-05,
      "loss": 1.5359,
      "step": 92
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.934552233743352e-05,
      "loss": 1.2047,
      "step": 93
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.931479807754209e-05,
      "loss": 1.6326,
      "step": 94
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.928337406526172e-05,
      "loss": 1.4582,
      "step": 95
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.925125074648352e-05,
      "loss": 1.5503,
      "step": 96
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.921842857702132e-05,
      "loss": 1.9591,
      "step": 97
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.918490802260538e-05,
      "loss": 1.6741,
      "step": 98
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.915068955887563e-05,
      "loss": 1.5237,
      "step": 99
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.911577367137499e-05,
      "loss": 1.645,
      "step": 100
    },
    {
      "epoch": 0.23,
      "eval_loss": 1.6467467546463013,
      "eval_runtime": 380.1313,
      "eval_samples_per_second": 1.436,
      "eval_steps_per_second": 0.718,
      "step": 100
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.90801608555425e-05,
      "loss": 1.8182,
      "step": 101
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.904385161670625e-05,
      "loss": 1.3908,
      "step": 102
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.900684647007624e-05,
      "loss": 1.39,
      "step": 103
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.896914594073703e-05,
      "loss": 1.643,
      "step": 104
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.893075056364034e-05,
      "loss": 1.6435,
      "step": 105
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.889166088359741e-05,
      "loss": 1.6227,
      "step": 106
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.885187745527133e-05,
      "loss": 1.5884,
      "step": 107
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.881140084316907e-05,
      "loss": 1.5687,
      "step": 108
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.87702316216336e-05,
      "loss": 1.7089,
      "step": 109
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.87283703748356e-05,
      "loss": 1.6631,
      "step": 110
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.868581769676533e-05,
      "loss": 1.6771,
      "step": 111
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.864257419122405e-05,
      "loss": 1.5418,
      "step": 112
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.859864047181552e-05,
      "loss": 1.2749,
      "step": 113
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.855401716193732e-05,
      "loss": 1.5715,
      "step": 114
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.850870489477197e-05,
      "loss": 1.6176,
      "step": 115
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.846270431327793e-05,
      "loss": 1.8423,
      "step": 116
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.841601607018052e-05,
      "loss": 1.5878,
      "step": 117
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.83686408279626e-05,
      "loss": 1.661,
      "step": 118
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.832057925885525e-05,
      "loss": 1.6109,
      "step": 119
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.827183204482817e-05,
      "loss": 1.7273,
      "step": 120
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.822239987757998e-05,
      "loss": 1.8402,
      "step": 121
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.817228345852851e-05,
      "loss": 1.4303,
      "step": 122
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.812148349880076e-05,
      "loss": 1.2896,
      "step": 123
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.807000071922279e-05,
      "loss": 1.6951,
      "step": 124
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.801783585030959e-05,
      "loss": 1.7379,
      "step": 125
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.79649896322546e-05,
      "loss": 1.5715,
      "step": 126
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.791146281491934e-05,
      "loss": 1.9257,
      "step": 127
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.785725615782262e-05,
      "loss": 1.7964,
      "step": 128
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.780237043012987e-05,
      "loss": 1.5906,
      "step": 129
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.774680641064222e-05,
      "loss": 1.7997,
      "step": 130
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.769056488778537e-05,
      "loss": 1.6863,
      "step": 131
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.763364665959849e-05,
      "loss": 1.3936,
      "step": 132
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.757605253372283e-05,
      "loss": 1.6815,
      "step": 133
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.751778332739033e-05,
      "loss": 1.6042,
      "step": 134
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.745883986741196e-05,
      "loss": 1.4678,
      "step": 135
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.739922299016601e-05,
      "loss": 1.5455,
      "step": 136
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.733893354158627e-05,
      "loss": 1.8297,
      "step": 137
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.72779723771499e-05,
      "loss": 1.7075,
      "step": 138
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.721634036186544e-05,
      "loss": 1.7954,
      "step": 139
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.715403837026046e-05,
      "loss": 1.572,
      "step": 140
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.709106728636911e-05,
      "loss": 1.8047,
      "step": 141
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.702742800371971e-05,
      "loss": 1.5607,
      "step": 142
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.696312142532192e-05,
      "loss": 1.6854,
      "step": 143
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.689814846365399e-05,
      "loss": 1.7957,
      "step": 144
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.683251004064988e-05,
      "loss": 1.6497,
      "step": 145
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.676620708768607e-05,
      "loss": 1.5492,
      "step": 146
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.669924054556836e-05,
      "loss": 1.5161,
      "step": 147
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.663161136451861e-05,
      "loss": 1.7488,
      "step": 148
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.656332050416116e-05,
      "loss": 1.6544,
      "step": 149
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.649436893350929e-05,
      "loss": 1.5541,
      "step": 150
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.642475763095134e-05,
      "loss": 1.4365,
      "step": 151
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.635448758423702e-05,
      "loss": 1.5063,
      "step": 152
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.628355979046324e-05,
      "loss": 1.8647,
      "step": 153
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.621197525606e-05,
      "loss": 1.6168,
      "step": 154
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.613973499677614e-05,
      "loss": 1.6571,
      "step": 155
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.606684003766493e-05,
      "loss": 1.4834,
      "step": 156
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.599329141306946e-05,
      "loss": 1.3979,
      "step": 157
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.591909016660807e-05,
      "loss": 1.6255,
      "step": 158
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.584423735115938e-05,
      "loss": 1.4855,
      "step": 159
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.576873402884756e-05,
      "loss": 1.7269,
      "step": 160
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.569258127102707e-05,
      "loss": 1.7885,
      "step": 161
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.561578015826758e-05,
      "loss": 1.6543,
      "step": 162
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.553833178033855e-05,
      "loss": 1.6093,
      "step": 163
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.546023723619386e-05,
      "loss": 1.5658,
      "step": 164
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.538149763395612e-05,
      "loss": 1.5708,
      "step": 165
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.530211409090104e-05,
      "loss": 1.6403,
      "step": 166
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.522208773344147e-05,
      "loss": 1.2768,
      "step": 167
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.514141969711154e-05,
      "loss": 1.4333,
      "step": 168
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.506011112655044e-05,
      "loss": 1.5742,
      "step": 169
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.497816317548625e-05,
      "loss": 1.4319,
      "step": 170
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.489557700671948e-05,
      "loss": 1.7918,
      "step": 171
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.481235379210671e-05,
      "loss": 1.7546,
      "step": 172
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.472849471254386e-05,
      "loss": 1.6831,
      "step": 173
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.46440009579494e-05,
      "loss": 1.5752,
      "step": 174
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.455887372724761e-05,
      "loss": 1.5413,
      "step": 175
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.447311422835142e-05,
      "loss": 1.6797,
      "step": 176
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.438672367814532e-05,
      "loss": 1.7095,
      "step": 177
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.429970330246817e-05,
      "loss": 1.4101,
      "step": 178
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.421205433609568e-05,
      "loss": 1.7538,
      "step": 179
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.412377802272297e-05,
      "loss": 1.7469,
      "step": 180
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.403487561494691e-05,
      "loss": 1.6779,
      "step": 181
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.39453483742483e-05,
      "loss": 1.5635,
      "step": 182
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.385519757097405e-05,
      "loss": 1.6769,
      "step": 183
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.37644244843191e-05,
      "loss": 1.7653,
      "step": 184
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.367303040230828e-05,
      "loss": 1.6367,
      "step": 185
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.358101662177803e-05,
      "loss": 1.8703,
      "step": 186
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.348838444835798e-05,
      "loss": 1.6888,
      "step": 187
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.339513519645249e-05,
      "loss": 1.7564,
      "step": 188
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.330127018922194e-05,
      "loss": 1.6898,
      "step": 189
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.320679075856395e-05,
      "loss": 1.6917,
      "step": 190
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.311169824509454e-05,
      "loss": 1.6607,
      "step": 191
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.301599399812904e-05,
      "loss": 1.6159,
      "step": 192
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.291967937566297e-05,
      "loss": 1.4132,
      "step": 193
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.282275574435281e-05,
      "loss": 1.6247,
      "step": 194
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.272522447949652e-05,
      "loss": 1.7138,
      "step": 195
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.262708696501412e-05,
      "loss": 1.6365,
      "step": 196
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.2528344593428e-05,
      "loss": 1.5955,
      "step": 197
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.242899876584317e-05,
      "loss": 1.7734,
      "step": 198
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.232905089192734e-05,
      "loss": 1.7847,
      "step": 199
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.222850238989103e-05,
      "loss": 1.6459,
      "step": 200
    },
    {
      "epoch": 0.45,
      "eval_loss": 1.619735598564148,
      "eval_runtime": 380.3616,
      "eval_samples_per_second": 1.435,
      "eval_steps_per_second": 0.718,
      "step": 200
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.21273546864673e-05,
      "loss": 1.7114,
      "step": 201
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.202560921689166e-05,
      "loss": 1.591,
      "step": 202
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.192326742488152e-05,
      "loss": 1.6804,
      "step": 203
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.182033076261592e-05,
      "loss": 1.7938,
      "step": 204
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.171680069071471e-05,
      "loss": 1.4387,
      "step": 205
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.161267867821802e-05,
      "loss": 1.5711,
      "step": 206
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.150796620256525e-05,
      "loss": 1.5225,
      "step": 207
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.140266474957421e-05,
      "loss": 1.7374,
      "step": 208
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.129677581342e-05,
      "loss": 1.325,
      "step": 209
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.11903008966138e-05,
      "loss": 1.6654,
      "step": 210
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.10832415099816e-05,
      "loss": 1.7373,
      "step": 211
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.097559917264267e-05,
      "loss": 1.7258,
      "step": 212
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.086737541198811e-05,
      "loss": 1.6344,
      "step": 213
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.075857176365911e-05,
      "loss": 1.5559,
      "step": 214
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.064918977152516e-05,
      "loss": 1.7747,
      "step": 215
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.053923098766217e-05,
      "loss": 1.8065,
      "step": 216
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.042869697233046e-05,
      "loss": 1.821,
      "step": 217
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.031758929395258e-05,
      "loss": 1.718,
      "step": 218
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.020590952909105e-05,
      "loss": 1.6306,
      "step": 219
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.009365926242603e-05,
      "loss": 1.8161,
      "step": 220
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.998084008673285e-05,
      "loss": 1.2547,
      "step": 221
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.986745360285932e-05,
      "loss": 1.5912,
      "step": 222
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.975350141970311e-05,
      "loss": 1.5138,
      "step": 223
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.963898515418884e-05,
      "loss": 1.72,
      "step": 224
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.952390643124524e-05,
      "loss": 1.6342,
      "step": 225
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.940826688378197e-05,
      "loss": 1.6454,
      "step": 226
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.929206815266652e-05,
      "loss": 1.8337,
      "step": 227
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.917531188670095e-05,
      "loss": 1.5335,
      "step": 228
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.905799974259845e-05,
      "loss": 1.77,
      "step": 229
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.894013338495981e-05,
      "loss": 1.6785,
      "step": 230
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.882171448624988e-05,
      "loss": 1.6688,
      "step": 231
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.870274472677376e-05,
      "loss": 1.5964,
      "step": 232
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.8583225794653e-05,
      "loss": 1.7153,
      "step": 233
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.846315938580163e-05,
      "loss": 1.8064,
      "step": 234
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.834254720390213e-05,
      "loss": 1.6305,
      "step": 235
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.82213909603812e-05,
      "loss": 1.5684,
      "step": 236
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.809969237438548e-05,
      "loss": 1.7526,
      "step": 237
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.797745317275726e-05,
      "loss": 1.5233,
      "step": 238
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.78546750900098e-05,
      "loss": 1.2263,
      "step": 239
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.773135986830287e-05,
      "loss": 1.5587,
      "step": 240
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.760750925741799e-05,
      "loss": 1.7591,
      "step": 241
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.74831250147335e-05,
      "loss": 1.708,
      "step": 242
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.73582089051998e-05,
      "loss": 1.5663,
      "step": 243
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.723276270131422e-05,
      "loss": 1.2198,
      "step": 244
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.710678818309576e-05,
      "loss": 1.5559,
      "step": 245
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.698028713806004e-05,
      "loss": 1.5615,
      "step": 246
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.68532613611938e-05,
      "loss": 1.8736,
      "step": 247
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.672571265492943e-05,
      "loss": 1.802,
      "step": 248
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.659764282911948e-05,
      "loss": 1.8928,
      "step": 249
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.646905370101091e-05,
      "loss": 1.7009,
      "step": 250
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.63399470952193e-05,
      "loss": 1.4649,
      "step": 251
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.621032484370299e-05,
      "loss": 1.5459,
      "step": 252
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.608018878573709e-05,
      "loss": 1.7053,
      "step": 253
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.594954076788736e-05,
      "loss": 1.6225,
      "step": 254
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.581838264398401e-05,
      "loss": 1.6186,
      "step": 255
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.56867162750954e-05,
      "loss": 1.5048,
      "step": 256
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.555454352950161e-05,
      "loss": 1.6085,
      "step": 257
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.542186628266802e-05,
      "loss": 1.6226,
      "step": 258
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.528868641721857e-05,
      "loss": 1.8184,
      "step": 259
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.515500582290914e-05,
      "loss": 1.4474,
      "step": 260
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.502082639660067e-05,
      "loss": 1.8168,
      "step": 261
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.488615004223233e-05,
      "loss": 1.5756,
      "step": 262
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.475097867079436e-05,
      "loss": 1.6404,
      "step": 263
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.461531420030117e-05,
      "loss": 1.6272,
      "step": 264
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.44791585557639e-05,
      "loss": 1.6069,
      "step": 265
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.434251366916322e-05,
      "loss": 1.7389,
      "step": 266
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.420538147942197e-05,
      "loss": 1.4526,
      "step": 267
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.406776393237747e-05,
      "loss": 1.395,
      "step": 268
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.392966298075413e-05,
      "loss": 1.5559,
      "step": 269
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.379108058413553e-05,
      "loss": 1.5938,
      "step": 270
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.365201870893679e-05,
      "loss": 1.5633,
      "step": 271
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.351247932837655e-05,
      "loss": 1.6534,
      "step": 272
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.337246442244901e-05,
      "loss": 1.5687,
      "step": 273
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.323197597789589e-05,
      "loss": 1.5531,
      "step": 274
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.309101598817813e-05,
      "loss": 1.4546,
      "step": 275
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.294958645344766e-05,
      "loss": 1.5684,
      "step": 276
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.280768938051908e-05,
      "loss": 1.7263,
      "step": 277
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.266532678284103e-05,
      "loss": 1.7123,
      "step": 278
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.252250068046782e-05,
      "loss": 1.5563,
      "step": 279
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.237921310003059e-05,
      "loss": 1.5035,
      "step": 280
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.223546607470863e-05,
      "loss": 1.5747,
      "step": 281
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.209126164420055e-05,
      "loss": 1.6334,
      "step": 282
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.194660185469529e-05,
      "loss": 1.5158,
      "step": 283
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.18014887588431e-05,
      "loss": 1.4902,
      "step": 284
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.165592441572647e-05,
      "loss": 1.7115,
      "step": 285
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.15099108908308e-05,
      "loss": 1.7038,
      "step": 286
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.13634502560152e-05,
      "loss": 1.5703,
      "step": 287
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.1216544589483e-05,
      "loss": 1.61,
      "step": 288
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.106919597575238e-05,
      "loss": 1.7725,
      "step": 289
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.092140650562665e-05,
      "loss": 1.6933,
      "step": 290
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.07731782761647e-05,
      "loss": 1.4752,
      "step": 291
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.062451339065115e-05,
      "loss": 1.3992,
      "step": 292
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.047541395856661e-05,
      "loss": 1.5156,
      "step": 293
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.032588209555765e-05,
      "loss": 1.6108,
      "step": 294
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.01759199234068e-05,
      "loss": 1.633,
      "step": 295
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.002552957000254e-05,
      "loss": 1.6842,
      "step": 296
    },
    {
      "epoch": 0.67,
      "learning_rate": 7.987471316930893e-05,
      "loss": 1.5822,
      "step": 297
    },
    {
      "epoch": 0.67,
      "learning_rate": 7.972347286133548e-05,
      "loss": 1.5932,
      "step": 298
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.957181079210675e-05,
      "loss": 1.7149,
      "step": 299
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.941972911363186e-05,
      "loss": 1.5916,
      "step": 300
    },
    {
      "epoch": 0.68,
      "eval_loss": 1.6114922761917114,
      "eval_runtime": 379.6975,
      "eval_samples_per_second": 1.438,
      "eval_steps_per_second": 0.719,
      "step": 300
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.926722998387397e-05,
      "loss": 1.5477,
      "step": 301
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.911431556671968e-05,
      "loss": 1.4926,
      "step": 302
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.896098803194828e-05,
      "loss": 1.7314,
      "step": 303
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.880724955520104e-05,
      "loss": 1.7405,
      "step": 304
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.865310231795026e-05,
      "loss": 1.7367,
      "step": 305
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.849854850746833e-05,
      "loss": 1.7337,
      "step": 306
    },
    {
      "epoch": 0.69,
      "learning_rate": 7.834359031679679e-05,
      "loss": 1.7436,
      "step": 307
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.818822994471505e-05,
      "loss": 1.8298,
      "step": 308
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.803246959570928e-05,
      "loss": 1.8664,
      "step": 309
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.78763114799412e-05,
      "loss": 1.7184,
      "step": 310
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.771975781321655e-05,
      "loss": 1.2468,
      "step": 311
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.75628108169538e-05,
      "loss": 1.6385,
      "step": 312
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.740547271815253e-05,
      "loss": 1.4619,
      "step": 313
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.724774574936188e-05,
      "loss": 1.5747,
      "step": 314
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.70896321486489e-05,
      "loss": 1.5747,
      "step": 315
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.693113415956674e-05,
      "loss": 1.6532,
      "step": 316
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.677225403112276e-05,
      "loss": 1.7604,
      "step": 317
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.661299401774678e-05,
      "loss": 1.7972,
      "step": 318
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.645335637925896e-05,
      "loss": 1.3371,
      "step": 319
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.629334338083773e-05,
      "loss": 1.7056,
      "step": 320
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.61329572929878e-05,
      "loss": 1.7155,
      "step": 321
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.597220039150768e-05,
      "loss": 1.6125,
      "step": 322
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.581107495745769e-05,
      "loss": 1.5879,
      "step": 323
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.564958327712734e-05,
      "loss": 1.639,
      "step": 324
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.548772764200307e-05,
      "loss": 1.7122,
      "step": 325
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.532551034873559e-05,
      "loss": 1.7528,
      "step": 326
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.516293369910737e-05,
      "loss": 1.5804,
      "step": 327
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.7145,
      "step": 328
    },
    {
      "epoch": 0.74,
      "learning_rate": 7.483671156336141e-05,
      "loss": 1.5839,
      "step": 329
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.467307070617309e-05,
      "loss": 1.8584,
      "step": 330
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.450907975041715e-05,
      "loss": 1.4994,
      "step": 331
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.43447410230435e-05,
      "loss": 1.3246,
      "step": 332
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.418005685593669e-05,
      "loss": 1.6393,
      "step": 333
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.401502958588293e-05,
      "loss": 1.5387,
      "step": 334
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.384966155453685e-05,
      "loss": 1.3346,
      "step": 335
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.368395510838838e-05,
      "loss": 1.318,
      "step": 336
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.351791259872929e-05,
      "loss": 1.7234,
      "step": 337
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.335153638162005e-05,
      "loss": 1.5846,
      "step": 338
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.318482881785613e-05,
      "loss": 1.4967,
      "step": 339
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.301779227293475e-05,
      "loss": 1.6795,
      "step": 340
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.285042911702115e-05,
      "loss": 1.466,
      "step": 341
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.268274172491508e-05,
      "loss": 1.9118,
      "step": 342
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.251473247601698e-05,
      "loss": 1.5324,
      "step": 343
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.234640375429427e-05,
      "loss": 1.637,
      "step": 344
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.217775794824759e-05,
      "loss": 1.6558,
      "step": 345
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.200879745087681e-05,
      "loss": 1.6324,
      "step": 346
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.183952465964711e-05,
      "loss": 1.7496,
      "step": 347
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.166994197645496e-05,
      "loss": 1.7385,
      "step": 348
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.15000518075941e-05,
      "loss": 1.6461,
      "step": 349
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.132985656372126e-05,
      "loss": 1.7131,
      "step": 350
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.115935865982206e-05,
      "loss": 1.6787,
      "step": 351
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.098856051517673e-05,
      "loss": 1.6028,
      "step": 352
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.081746455332576e-05,
      "loss": 1.4347,
      "step": 353
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.064607320203551e-05,
      "loss": 1.5735,
      "step": 354
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.047438889326376e-05,
      "loss": 1.2504,
      "step": 355
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.030241406312528e-05,
      "loss": 1.9139,
      "step": 356
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.013015115185706e-05,
      "loss": 1.5579,
      "step": 357
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.995760260378395e-05,
      "loss": 1.6616,
      "step": 358
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.978477086728374e-05,
      "loss": 1.7095,
      "step": 359
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.961165839475263e-05,
      "loss": 1.8251,
      "step": 360
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.943826764257021e-05,
      "loss": 1.5992,
      "step": 361
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.926460107106482e-05,
      "loss": 1.712,
      "step": 362
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.909066114447847e-05,
      "loss": 1.6707,
      "step": 363
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.891645033093196e-05,
      "loss": 1.4995,
      "step": 364
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.874197110238985e-05,
      "loss": 1.666,
      "step": 365
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.856722593462537e-05,
      "loss": 1.5461,
      "step": 366
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.839221730718533e-05,
      "loss": 1.5381,
      "step": 367
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.821694770335481e-05,
      "loss": 1.7022,
      "step": 368
    },
    {
      "epoch": 0.83,
      "learning_rate": 6.804141961012213e-05,
      "loss": 1.5809,
      "step": 369
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.786563551814333e-05,
      "loss": 1.8404,
      "step": 370
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.768959792170706e-05,
      "loss": 1.8898,
      "step": 371
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.751330931869897e-05,
      "loss": 1.5612,
      "step": 372
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.733677221056644e-05,
      "loss": 1.5037,
      "step": 373
    },
    {
      "epoch": 0.84,
      "learning_rate": 6.715998910228296e-05,
      "loss": 1.5153,
      "step": 374
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.69829625023127e-05,
      "loss": 1.6037,
      "step": 375
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.68056949225748e-05,
      "loss": 1.2321,
      "step": 376
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.66281888784078e-05,
      "loss": 1.6523,
      "step": 377
    },
    {
      "epoch": 0.85,
      "learning_rate": 6.645044688853395e-05,
      "loss": 1.443,
      "step": 378
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.627247147502343e-05,
      "loss": 1.775,
      "step": 379
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.60942651632586e-05,
      "loss": 1.5969,
      "step": 380
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.591583048189812e-05,
      "loss": 1.5039,
      "step": 381
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.573716996284113e-05,
      "loss": 1.6205,
      "step": 382
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.555828614119132e-05,
      "loss": 1.4704,
      "step": 383
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.537918155522087e-05,
      "loss": 1.5824,
      "step": 384
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.519985874633454e-05,
      "loss": 1.5881,
      "step": 385
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.502032025903355e-05,
      "loss": 1.5599,
      "step": 386
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.484056864087947e-05,
      "loss": 1.6519,
      "step": 387
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.466060644245815e-05,
      "loss": 1.6166,
      "step": 388
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.448043621734337e-05,
      "loss": 1.6451,
      "step": 389
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.430006052206082e-05,
      "loss": 1.7321,
      "step": 390
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.411948191605163e-05,
      "loss": 1.6647,
      "step": 391
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.393870296163615e-05,
      "loss": 1.2297,
      "step": 392
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.375772622397761e-05,
      "loss": 1.6895,
      "step": 393
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.357655427104562e-05,
      "loss": 1.5156,
      "step": 394
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.339518967357985e-05,
      "loss": 1.4977,
      "step": 395
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.321363500505347e-05,
      "loss": 1.5753,
      "step": 396
    },
    {
      "epoch": 0.9,
      "learning_rate": 6.303189284163668e-05,
      "loss": 1.6325,
      "step": 397
    },
    {
      "epoch": 0.9,
      "learning_rate": 6.284996576216014e-05,
      "loss": 1.6534,
      "step": 398
    },
    {
      "epoch": 0.9,
      "learning_rate": 6.266785634807837e-05,
      "loss": 1.6973,
      "step": 399
    },
    {
      "epoch": 0.9,
      "learning_rate": 6.248556718343314e-05,
      "loss": 1.3587,
      "step": 400
    },
    {
      "epoch": 0.9,
      "eval_loss": 1.6087621450424194,
      "eval_runtime": 379.6023,
      "eval_samples_per_second": 1.438,
      "eval_steps_per_second": 0.719,
      "step": 400
    }
  ],
  "logging_steps": 1,
  "max_steps": 884,
  "num_train_epochs": 2,
  "save_steps": 100,
  "total_flos": 1.020569592004608e+18,
  "trial_name": null,
  "trial_params": null
}