{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 11.034906118012294,
      "learning_rate": 2e-07,
      "loss": 3.3072,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 8.342029333286245,
      "learning_rate": 4e-07,
      "loss": 2.5181,
      "step": 2
    },
    {
      "epoch": 0.03,
      "grad_norm": 11.242982411667311,
      "learning_rate": 6e-07,
      "loss": 3.091,
      "step": 3
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.583970040249307,
      "learning_rate": 8e-07,
      "loss": 2.0275,
      "step": 4
    },
    {
      "epoch": 0.05,
      "grad_norm": 9.459350774016439,
      "learning_rate": 1e-06,
      "loss": 2.7841,
      "step": 5
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.513961571396786,
      "learning_rate": 1.2e-06,
      "loss": 3.1251,
      "step": 6
    },
    {
      "epoch": 0.07,
      "grad_norm": 8.543813815295838,
      "learning_rate": 1.4e-06,
      "loss": 2.7579,
      "step": 7
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.595095020234342,
      "learning_rate": 1.6e-06,
      "loss": 2.7238,
      "step": 8
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.695517534101955,
      "learning_rate": 1.8e-06,
      "loss": 2.8206,
      "step": 9
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.393498293102003,
      "learning_rate": 2e-06,
      "loss": 2.5533,
      "step": 10
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.260411320337986,
      "learning_rate": 1.9999675557165277e-06,
      "loss": 2.4632,
      "step": 11
    },
    {
      "epoch": 0.12,
      "grad_norm": 11.350295546296435,
      "learning_rate": 1.9998702249713745e-06,
      "loss": 3.2465,
      "step": 12
    },
    {
      "epoch": 0.13,
      "grad_norm": 7.22829832667976,
      "learning_rate": 1.999708014080193e-06,
      "loss": 2.7724,
      "step": 13
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.264505272794518,
      "learning_rate": 1.9994809335686148e-06,
      "loss": 1.4729,
      "step": 14
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.487224091906534,
      "learning_rate": 1.9991889981715695e-06,
      "loss": 2.2005,
      "step": 15
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.639447521961718,
      "learning_rate": 1.9988322268323264e-06,
      "loss": 2.7849,
      "step": 16
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.877140297568701,
      "learning_rate": 1.9984106427012667e-06,
      "loss": 3.1055,
      "step": 17
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.7730374876185957,
      "learning_rate": 1.9979242731343803e-06,
      "loss": 2.497,
      "step": 18
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.657636943178752,
      "learning_rate": 1.997373149691491e-06,
      "loss": 2.9273,
      "step": 19
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.0729887854058964,
      "learning_rate": 1.99675730813421e-06,
      "loss": 2.5135,
      "step": 20
    },
    {
      "epoch": 0.21,
      "grad_norm": 4.670717031408749,
      "learning_rate": 1.996076788423613e-06,
      "loss": 2.4479,
      "step": 21
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.6910831327416003,
      "learning_rate": 1.9953316347176486e-06,
      "loss": 2.0386,
      "step": 22
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.699623336884353,
      "learning_rate": 1.994521895368273e-06,
      "loss": 2.8066,
      "step": 23
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.48403675028581,
      "learning_rate": 1.993647622918313e-06,
      "loss": 2.4701,
      "step": 24
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.099491388558407,
      "learning_rate": 1.9927088740980536e-06,
      "loss": 2.6852,
      "step": 25
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.951499898355411,
      "learning_rate": 1.991705709821562e-06,
      "loss": 2.4056,
      "step": 26
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.7261385470933357,
      "learning_rate": 1.990638195182729e-06,
      "loss": 3.1598,
      "step": 27
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.729816445627831,
      "learning_rate": 1.989506399451051e-06,
      "loss": 2.5593,
      "step": 28
    },
    {
      "epoch": 0.29,
      "grad_norm": 6.3601179558914005,
      "learning_rate": 1.9883103960671304e-06,
      "loss": 2.3769,
      "step": 29
    },
    {
      "epoch": 0.3,
      "grad_norm": 3.3425367150604486,
      "learning_rate": 1.9870502626379126e-06,
      "loss": 2.9407,
      "step": 30
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.167931438919616,
      "learning_rate": 1.9857260809316507e-06,
      "loss": 2.7142,
      "step": 31
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.4345245228046997,
      "learning_rate": 1.984337936872598e-06,
      "loss": 2.4862,
      "step": 32
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.301046482260705,
      "learning_rate": 1.9828859205354322e-06,
      "loss": 3.0665,
      "step": 33
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.8107607204332408,
      "learning_rate": 1.981370126139413e-06,
      "loss": 3.5054,
      "step": 34
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.6629979731983475,
      "learning_rate": 1.9797906520422674e-06,
      "loss": 2.7669,
      "step": 35
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.5923595481221224,
      "learning_rate": 1.9781476007338054e-06,
      "loss": 2.9641,
      "step": 36
    },
    {
      "epoch": 0.37,
      "grad_norm": 2.8444306805466013,
      "learning_rate": 1.976441078829272e-06,
      "loss": 1.8331,
      "step": 37
    },
    {
      "epoch": 0.38,
      "grad_norm": 3.5565670829561022,
      "learning_rate": 1.974671197062428e-06,
      "loss": 2.701,
      "step": 38
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.5201013445124274,
      "learning_rate": 1.972838070278364e-06,
      "loss": 2.3367,
      "step": 39
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.2569984535941283,
      "learning_rate": 1.970941817426052e-06,
      "loss": 2.0169,
      "step": 40
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.7463871353146065,
      "learning_rate": 1.9689825615506204e-06,
      "loss": 2.0741,
      "step": 41
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.4202758569232974,
      "learning_rate": 1.9669604297853765e-06,
      "loss": 2.512,
      "step": 42
    },
    {
      "epoch": 0.43,
      "grad_norm": 2.3710708099074864,
      "learning_rate": 1.9648755533435517e-06,
      "loss": 1.5202,
      "step": 43
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.918111295902255,
      "learning_rate": 1.9627280675097907e-06,
      "loss": 3.0566,
      "step": 44
    },
    {
      "epoch": 0.45,
      "grad_norm": 3.2329590759683264,
      "learning_rate": 1.9605181116313723e-06,
      "loss": 2.2885,
      "step": 45
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.5631175201185465,
      "learning_rate": 1.9582458291091663e-06,
      "loss": 1.5685,
      "step": 46
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.325773119529427,
      "learning_rate": 1.955911367388329e-06,
      "loss": 2.0483,
      "step": 47
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.5390088616305615,
      "learning_rate": 1.9535148779487363e-06,
      "loss": 2.3026,
      "step": 48
    },
    {
      "epoch": 0.49,
      "grad_norm": 3.471780417925277,
      "learning_rate": 1.9510565162951534e-06,
      "loss": 2.9149,
      "step": 49
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.290534820938375,
      "learning_rate": 1.9485364419471455e-06,
      "loss": 2.5526,
      "step": 50
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.2790349959484844,
      "learning_rate": 1.945954818428725e-06,
      "loss": 2.12,
      "step": 51
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.937267527964616,
      "learning_rate": 1.943311813257743e-06,
      "loss": 2.506,
      "step": 52
    },
    {
      "epoch": 0.53,
      "grad_norm": 3.5635099675775397,
      "learning_rate": 1.9406075979350173e-06,
      "loss": 2.7026,
      "step": 53
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.882350914422191,
      "learning_rate": 1.9378423479332045e-06,
      "loss": 2.3409,
      "step": 54
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.2384517113771034,
      "learning_rate": 1.9350162426854148e-06,
      "loss": 2.966,
      "step": 55
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.3327050036846506,
      "learning_rate": 1.9321294655735677e-06,
      "loss": 1.2433,
      "step": 56
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.573093983791755,
      "learning_rate": 1.929182203916493e-06,
      "loss": 1.5502,
      "step": 57
    },
    {
      "epoch": 0.58,
      "grad_norm": 3.68389433943638,
      "learning_rate": 1.9261746489577764e-06,
      "loss": 1.3458,
      "step": 58
    },
    {
      "epoch": 0.59,
      "grad_norm": 2.7282256066111046,
      "learning_rate": 1.9231069958533488e-06,
      "loss": 1.9563,
      "step": 59
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.1622073079116855,
      "learning_rate": 1.9199794436588242e-06,
      "loss": 2.3207,
      "step": 60
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.894897190825691,
      "learning_rate": 1.9167921953165826e-06,
      "loss": 2.6743,
      "step": 61
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.9685942776074956,
      "learning_rate": 1.9135454576426007e-06,
      "loss": 3.2693,
      "step": 62
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.9212586978415422,
      "learning_rate": 1.9102394413130343e-06,
      "loss": 2.5456,
      "step": 63
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.535902364146159,
      "learning_rate": 1.9068743608505452e-06,
      "loss": 2.387,
      "step": 64
    },
    {
      "epoch": 0.65,
      "grad_norm": 3.217651704303671,
      "learning_rate": 1.9034504346103822e-06,
      "loss": 3.1621,
      "step": 65
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.6086959528633287,
      "learning_rate": 1.899967884766212e-06,
      "loss": 2.2165,
      "step": 66
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.8950659723726524,
      "learning_rate": 1.8964269372957036e-06,
      "loss": 2.049,
      "step": 67
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.472843103230003,
      "learning_rate": 1.892827821965864e-06,
      "loss": 2.1215,
      "step": 68
    },
    {
      "epoch": 0.69,
      "grad_norm": 5.483803674424827,
      "learning_rate": 1.8891707723181292e-06,
      "loss": 2.8693,
      "step": 69
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.946591324147399,
      "learning_rate": 1.8854560256532098e-06,
      "loss": 2.7123,
      "step": 70
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.807522896392873,
      "learning_rate": 1.881683823015694e-06,
      "loss": 2.8497,
      "step": 71
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.6024822728403576,
      "learning_rate": 1.8778544091784046e-06,
      "loss": 2.6366,
      "step": 72
    },
    {
      "epoch": 0.73,
      "grad_norm": 3.1804694638843825,
      "learning_rate": 1.873968032626518e-06,
      "loss": 2.4264,
      "step": 73
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.202324783789416,
      "learning_rate": 1.8700249455414392e-06,
      "loss": 1.9495,
      "step": 74
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.718012853530589,
      "learning_rate": 1.8660254037844386e-06,
      "loss": 2.4048,
      "step": 75
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.7344487811181324,
      "learning_rate": 1.861969666880049e-06,
      "loss": 2.9311,
      "step": 76
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.426119637752445,
      "learning_rate": 1.8578579979992264e-06,
      "loss": 3.2567,
      "step": 77
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.424553353557125,
      "learning_rate": 1.8536906639422723e-06,
      "loss": 1.3894,
      "step": 78
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.6332246429203585,
      "learning_rate": 1.849467935121521e-06,
      "loss": 2.0805,
      "step": 79
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.963679112021814,
      "learning_rate": 1.8451900855437948e-06,
      "loss": 2.2385,
      "step": 80
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.326048125575145,
      "learning_rate": 1.8408573927926221e-06,
      "loss": 2.2506,
      "step": 81
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.370657089874872,
      "learning_rate": 1.8364701380102264e-06,
      "loss": 1.3311,
      "step": 82
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.9535264322406065,
      "learning_rate": 1.8320286058792843e-06,
      "loss": 2.2799,
      "step": 83
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.8249889576883387,
      "learning_rate": 1.82753308460445e-06,
      "loss": 2.8564,
      "step": 84
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.7437058642564813,
      "learning_rate": 1.8229838658936564e-06,
      "loss": 2.4897,
      "step": 85
    },
    {
      "epoch": 0.86,
      "grad_norm": 3.2803564250465915,
      "learning_rate": 1.818381244939187e-06,
      "loss": 2.4974,
      "step": 86
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.2484261848308376,
      "learning_rate": 1.8137255203985196e-06,
      "loss": 2.2902,
      "step": 87
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.6795014358036204,
      "learning_rate": 1.8090169943749474e-06,
      "loss": 2.1316,
      "step": 88
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.6292578695567417,
      "learning_rate": 1.8042559723979767e-06,
      "loss": 1.9798,
      "step": 89
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.1422464465838176,
      "learning_rate": 1.7994427634035012e-06,
      "loss": 2.3323,
      "step": 90
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.993979550191501,
      "learning_rate": 1.7945776797137542e-06,
      "loss": 2.1355,
      "step": 91
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.5678717134911375,
      "learning_rate": 1.7896610370170448e-06,
      "loss": 1.7538,
      "step": 92
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.723564728060478,
      "learning_rate": 1.7846931543472718e-06,
      "loss": 2.4138,
      "step": 93
    },
    {
      "epoch": 0.94,
      "grad_norm": 2.8241408405850206,
      "learning_rate": 1.7796743540632221e-06,
      "loss": 2.8358,
      "step": 94
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.598460561460152,
      "learning_rate": 1.7746049618276543e-06,
      "loss": 2.2555,
      "step": 95
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.5370024371672346,
      "learning_rate": 1.7694853065861659e-06,
      "loss": 3.0556,
      "step": 96
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.144317687276328,
      "learning_rate": 1.7643157205458482e-06,
      "loss": 1.4138,
      "step": 97
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.5108458672744893,
      "learning_rate": 1.7590965391537313e-06,
      "loss": 2.1104,
      "step": 98
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.9980770618295,
      "learning_rate": 1.7538281010750166e-06,
      "loss": 2.6966,
      "step": 99
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.166782870443781,
      "learning_rate": 1.748510748171101e-06,
      "loss": 2.1128,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8005398036480.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}