{ |
|
"best_metric": 0.6945011615753174, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-400", |
|
"epoch": 0.02516910492370615, |
|
"eval_steps": 50, |
|
"global_step": 400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 6.292276230926537e-05, |
|
"grad_norm": 4.59516716003418, |
|
"learning_rate": 1e-05, |
|
"loss": 3.1281, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 6.292276230926537e-05, |
|
"eval_loss": 3.3380603790283203, |
|
"eval_runtime": 1980.002, |
|
"eval_samples_per_second": 13.519, |
|
"eval_steps_per_second": 3.38, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00012584552461853074, |
|
"grad_norm": 5.216956615447998, |
|
"learning_rate": 2e-05, |
|
"loss": 3.3234, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00018876828692779613, |
|
"grad_norm": 4.922913074493408, |
|
"learning_rate": 3e-05, |
|
"loss": 3.1657, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0002516910492370615, |
|
"grad_norm": 3.8697447776794434, |
|
"learning_rate": 4e-05, |
|
"loss": 2.998, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0003146138115463269, |
|
"grad_norm": 3.7430148124694824, |
|
"learning_rate": 5e-05, |
|
"loss": 2.6709, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.00037753657385559226, |
|
"grad_norm": 3.408188819885254, |
|
"learning_rate": 6e-05, |
|
"loss": 2.3179, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00044045933616485765, |
|
"grad_norm": 3.0654706954956055, |
|
"learning_rate": 7e-05, |
|
"loss": 2.1039, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.000503382098474123, |
|
"grad_norm": 3.04288387298584, |
|
"learning_rate": 8e-05, |
|
"loss": 1.7594, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0005663048607833884, |
|
"grad_norm": 2.724019765853882, |
|
"learning_rate": 9e-05, |
|
"loss": 1.5574, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0006292276230926538, |
|
"grad_norm": 2.3080849647521973, |
|
"learning_rate": 0.0001, |
|
"loss": 1.4881, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0006921503854019191, |
|
"grad_norm": 2.039785861968994, |
|
"learning_rate": 9.99983777858264e-05, |
|
"loss": 1.422, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0007550731477111845, |
|
"grad_norm": 1.599783182144165, |
|
"learning_rate": 9.999351124856874e-05, |
|
"loss": 1.1494, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0008179959100204499, |
|
"grad_norm": 1.685577392578125, |
|
"learning_rate": 9.998540070400966e-05, |
|
"loss": 1.1811, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0008809186723297153, |
|
"grad_norm": 1.5412378311157227, |
|
"learning_rate": 9.997404667843075e-05, |
|
"loss": 1.1081, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0009438414346389807, |
|
"grad_norm": 1.567974328994751, |
|
"learning_rate": 9.995944990857849e-05, |
|
"loss": 1.1671, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.001006764196948246, |
|
"grad_norm": 1.5965992212295532, |
|
"learning_rate": 9.994161134161634e-05, |
|
"loss": 1.138, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0010696869592575113, |
|
"grad_norm": 1.4739341735839844, |
|
"learning_rate": 9.992053213506334e-05, |
|
"loss": 1.1432, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0011326097215667767, |
|
"grad_norm": 1.693847894668579, |
|
"learning_rate": 9.989621365671902e-05, |
|
"loss": 0.9893, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0011955324838760421, |
|
"grad_norm": 1.262269377708435, |
|
"learning_rate": 9.986865748457457e-05, |
|
"loss": 0.9833, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0012584552461853075, |
|
"grad_norm": 1.2658629417419434, |
|
"learning_rate": 9.983786540671051e-05, |
|
"loss": 0.9171, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0013213780084945729, |
|
"grad_norm": 1.2282474040985107, |
|
"learning_rate": 9.980383942118066e-05, |
|
"loss": 0.9978, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0013843007708038383, |
|
"grad_norm": 0.960390567779541, |
|
"learning_rate": 9.976658173588244e-05, |
|
"loss": 0.9271, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0014472235331131037, |
|
"grad_norm": 1.1571574211120605, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 0.8878, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.001510146295422369, |
|
"grad_norm": 1.1556771993637085, |
|
"learning_rate": 9.968238114591566e-05, |
|
"loss": 0.9895, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0015730690577316344, |
|
"grad_norm": 0.8927937746047974, |
|
"learning_rate": 9.96354437049027e-05, |
|
"loss": 0.7841, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0016359918200408998, |
|
"grad_norm": 1.126304030418396, |
|
"learning_rate": 9.95852854910781e-05, |
|
"loss": 0.9274, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0016989145823501652, |
|
"grad_norm": 1.238614797592163, |
|
"learning_rate": 9.953190975913647e-05, |
|
"loss": 0.9173, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0017618373446594306, |
|
"grad_norm": 0.9778956174850464, |
|
"learning_rate": 9.947531997255256e-05, |
|
"loss": 0.8841, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.001824760106968696, |
|
"grad_norm": 1.1729629039764404, |
|
"learning_rate": 9.941551980335652e-05, |
|
"loss": 0.8547, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0018876828692779614, |
|
"grad_norm": 0.9491772651672363, |
|
"learning_rate": 9.935251313189564e-05, |
|
"loss": 0.841, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0019506056315872267, |
|
"grad_norm": 1.2383917570114136, |
|
"learning_rate": 9.928630404658255e-05, |
|
"loss": 0.8724, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.002013528393896492, |
|
"grad_norm": 1.1126729249954224, |
|
"learning_rate": 9.921689684362989e-05, |
|
"loss": 0.8463, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0020764511562057575, |
|
"grad_norm": 1.2784355878829956, |
|
"learning_rate": 9.914429602677162e-05, |
|
"loss": 0.9435, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0021393739185150227, |
|
"grad_norm": 1.074558138847351, |
|
"learning_rate": 9.906850630697068e-05, |
|
"loss": 0.8062, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0022022966808242883, |
|
"grad_norm": 1.165006160736084, |
|
"learning_rate": 9.898953260211338e-05, |
|
"loss": 0.8545, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0022652194431335535, |
|
"grad_norm": 1.1657222509384155, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 0.9286, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.002328142205442819, |
|
"grad_norm": 1.1474952697753906, |
|
"learning_rate": 9.882205394146361e-05, |
|
"loss": 0.9362, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0023910649677520842, |
|
"grad_norm": 1.2673144340515137, |
|
"learning_rate": 9.87335598531214e-05, |
|
"loss": 0.8964, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.00245398773006135, |
|
"grad_norm": 1.1087899208068848, |
|
"learning_rate": 9.864190351391822e-05, |
|
"loss": 0.852, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.002516910492370615, |
|
"grad_norm": 0.9186160564422607, |
|
"learning_rate": 9.85470908713026e-05, |
|
"loss": 0.8565, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0025798332546798806, |
|
"grad_norm": 1.0054467916488647, |
|
"learning_rate": 9.844912807753104e-05, |
|
"loss": 0.9039, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0026427560169891458, |
|
"grad_norm": 0.9011829495429993, |
|
"learning_rate": 9.834802148926882e-05, |
|
"loss": 0.8552, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0027056787792984114, |
|
"grad_norm": 0.8763337135314941, |
|
"learning_rate": 9.824377766717759e-05, |
|
"loss": 0.8425, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0027686015416076766, |
|
"grad_norm": 1.0176167488098145, |
|
"learning_rate": 9.813640337548954e-05, |
|
"loss": 0.9245, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.002831524303916942, |
|
"grad_norm": 0.9574334621429443, |
|
"learning_rate": 9.802590558156862e-05, |
|
"loss": 0.8706, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.0028944470662262073, |
|
"grad_norm": 1.0388308763504028, |
|
"learning_rate": 9.791229145545831e-05, |
|
"loss": 0.9249, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0029573698285354725, |
|
"grad_norm": 1.0037546157836914, |
|
"learning_rate": 9.779556836941645e-05, |
|
"loss": 0.9104, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.003020292590844738, |
|
"grad_norm": 0.877053439617157, |
|
"learning_rate": 9.767574389743682e-05, |
|
"loss": 0.9356, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.0030832153531540033, |
|
"grad_norm": 0.9509828686714172, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.8892, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.003146138115463269, |
|
"grad_norm": 1.2426848411560059, |
|
"learning_rate": 9.742682209735727e-05, |
|
"loss": 0.9818, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.003146138115463269, |
|
"eval_loss": 0.9876257181167603, |
|
"eval_runtime": 1992.3189, |
|
"eval_samples_per_second": 13.435, |
|
"eval_steps_per_second": 3.359, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.003209060877772534, |
|
"grad_norm": 2.743227481842041, |
|
"learning_rate": 9.729774092143627e-05, |
|
"loss": 1.1564, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.0032719836400817996, |
|
"grad_norm": 2.042041301727295, |
|
"learning_rate": 9.716559066288715e-05, |
|
"loss": 1.1023, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.003334906402391065, |
|
"grad_norm": 1.4456409215927124, |
|
"learning_rate": 9.703037989675087e-05, |
|
"loss": 1.0536, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0033978291647003304, |
|
"grad_norm": 1.1415081024169922, |
|
"learning_rate": 9.689211739666023e-05, |
|
"loss": 0.8627, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0034607519270095956, |
|
"grad_norm": 0.9333504438400269, |
|
"learning_rate": 9.675081213427076e-05, |
|
"loss": 0.9348, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.003523674689318861, |
|
"grad_norm": 0.7401662468910217, |
|
"learning_rate": 9.66064732786784e-05, |
|
"loss": 0.8109, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.0035865974516281264, |
|
"grad_norm": 1.0346910953521729, |
|
"learning_rate": 9.645911019582467e-05, |
|
"loss": 0.9177, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.003649520213937392, |
|
"grad_norm": 0.9704263806343079, |
|
"learning_rate": 9.630873244788883e-05, |
|
"loss": 0.9524, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.003712442976246657, |
|
"grad_norm": 0.9952749609947205, |
|
"learning_rate": 9.615534979266745e-05, |
|
"loss": 0.8763, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.0037753657385559227, |
|
"grad_norm": 0.7279119491577148, |
|
"learning_rate": 9.599897218294122e-05, |
|
"loss": 0.72, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.003838288500865188, |
|
"grad_norm": 0.8951886892318726, |
|
"learning_rate": 9.583960976582913e-05, |
|
"loss": 0.8242, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0039012112631744535, |
|
"grad_norm": 1.0600823163986206, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 0.8879, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.003964134025483719, |
|
"grad_norm": 0.9607635736465454, |
|
"learning_rate": 9.551197206565173e-05, |
|
"loss": 0.8622, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.004027056787792984, |
|
"grad_norm": 0.8205705285072327, |
|
"learning_rate": 9.534371804252728e-05, |
|
"loss": 0.8198, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.00408997955010225, |
|
"grad_norm": 0.9197847247123718, |
|
"learning_rate": 9.517252173051911e-05, |
|
"loss": 0.7708, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.004152902312411515, |
|
"grad_norm": 1.0398162603378296, |
|
"learning_rate": 9.49983942383106e-05, |
|
"loss": 0.8404, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.00421582507472078, |
|
"grad_norm": 0.937667965888977, |
|
"learning_rate": 9.482134686478519e-05, |
|
"loss": 0.7937, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.004278747837030045, |
|
"grad_norm": 0.7911063432693481, |
|
"learning_rate": 9.464139109829321e-05, |
|
"loss": 0.7707, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.004341670599339311, |
|
"grad_norm": 0.9090245366096497, |
|
"learning_rate": 9.445853861590647e-05, |
|
"loss": 0.8013, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.004404593361648577, |
|
"grad_norm": 0.7896239757537842, |
|
"learning_rate": 9.42728012826605e-05, |
|
"loss": 0.7714, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.004467516123957842, |
|
"grad_norm": 0.8394172191619873, |
|
"learning_rate": 9.408419115078471e-05, |
|
"loss": 0.7667, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.004530438886267107, |
|
"grad_norm": 0.9385750889778137, |
|
"learning_rate": 9.389272045892024e-05, |
|
"loss": 0.7939, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.004593361648576372, |
|
"grad_norm": 0.8299635052680969, |
|
"learning_rate": 9.36984016313259e-05, |
|
"loss": 0.8294, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.004656284410885638, |
|
"grad_norm": 0.7701219916343689, |
|
"learning_rate": 9.350124727707197e-05, |
|
"loss": 0.8171, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.004719207173194903, |
|
"grad_norm": 0.7639235258102417, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 0.8171, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.0047821299355041685, |
|
"grad_norm": 0.8286775946617126, |
|
"learning_rate": 9.309848334400246e-05, |
|
"loss": 0.7997, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.004845052697813434, |
|
"grad_norm": 0.8708102703094482, |
|
"learning_rate": 9.289289989996133e-05, |
|
"loss": 0.8435, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0049079754601227, |
|
"grad_norm": 0.7374812960624695, |
|
"learning_rate": 9.268453319711363e-05, |
|
"loss": 0.7903, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.004970898222431965, |
|
"grad_norm": 0.7260936498641968, |
|
"learning_rate": 9.247339675607605e-05, |
|
"loss": 0.7556, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.00503382098474123, |
|
"grad_norm": 0.78413987159729, |
|
"learning_rate": 9.225950427718975e-05, |
|
"loss": 0.7595, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.005096743747050495, |
|
"grad_norm": 0.9290556907653809, |
|
"learning_rate": 9.204286963963111e-05, |
|
"loss": 0.8279, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.005159666509359761, |
|
"grad_norm": 0.8330967426300049, |
|
"learning_rate": 9.182350690051133e-05, |
|
"loss": 0.8712, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.005222589271669026, |
|
"grad_norm": 0.9688509702682495, |
|
"learning_rate": 9.160143029396422e-05, |
|
"loss": 0.8465, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0052855120339782916, |
|
"grad_norm": 0.862083375453949, |
|
"learning_rate": 9.13766542302225e-05, |
|
"loss": 0.7303, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.005348434796287557, |
|
"grad_norm": 0.6805185675621033, |
|
"learning_rate": 9.114919329468282e-05, |
|
"loss": 0.7395, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.005411357558596823, |
|
"grad_norm": 0.8476338982582092, |
|
"learning_rate": 9.091906224695935e-05, |
|
"loss": 0.871, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.005474280320906088, |
|
"grad_norm": 0.7605541348457336, |
|
"learning_rate": 9.068627601992598e-05, |
|
"loss": 0.8041, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.005537203083215353, |
|
"grad_norm": 0.7872311472892761, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.7563, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.005600125845524618, |
|
"grad_norm": 0.7678333520889282, |
|
"learning_rate": 9.021279861989885e-05, |
|
"loss": 0.8247, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.005663048607833884, |
|
"grad_norm": 0.7843219637870789, |
|
"learning_rate": 8.997213817017507e-05, |
|
"loss": 0.8162, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0057259713701431495, |
|
"grad_norm": 0.6998229622840881, |
|
"learning_rate": 8.972888398568772e-05, |
|
"loss": 0.7625, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.005788894132452415, |
|
"grad_norm": 0.7472991943359375, |
|
"learning_rate": 8.948305185085225e-05, |
|
"loss": 0.81, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.00585181689476168, |
|
"grad_norm": 0.7633485198020935, |
|
"learning_rate": 8.92346577173636e-05, |
|
"loss": 0.8172, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.005914739657070945, |
|
"grad_norm": 0.759559690952301, |
|
"learning_rate": 8.898371770316111e-05, |
|
"loss": 0.8616, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.005977662419380211, |
|
"grad_norm": 0.8597033619880676, |
|
"learning_rate": 8.873024809138272e-05, |
|
"loss": 0.8313, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.006040585181689476, |
|
"grad_norm": 0.7939256429672241, |
|
"learning_rate": 8.847426532930831e-05, |
|
"loss": 0.8815, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.006103507943998741, |
|
"grad_norm": 0.8816700577735901, |
|
"learning_rate": 8.821578602729242e-05, |
|
"loss": 0.8923, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.0061664307063080065, |
|
"grad_norm": 0.9676582217216492, |
|
"learning_rate": 8.795482695768658e-05, |
|
"loss": 0.8964, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.0062293534686172726, |
|
"grad_norm": 0.8678397536277771, |
|
"learning_rate": 8.769140505375085e-05, |
|
"loss": 0.8964, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.006292276230926538, |
|
"grad_norm": 0.9837563633918762, |
|
"learning_rate": 8.742553740855506e-05, |
|
"loss": 0.8875, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.006292276230926538, |
|
"eval_loss": 0.9126793146133423, |
|
"eval_runtime": 1993.501, |
|
"eval_samples_per_second": 13.427, |
|
"eval_steps_per_second": 3.357, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.006355198993235803, |
|
"grad_norm": 1.4781923294067383, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 0.9134, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.006418121755545068, |
|
"grad_norm": 1.337426781654358, |
|
"learning_rate": 8.688653405904652e-05, |
|
"loss": 0.9012, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.006481044517854334, |
|
"grad_norm": 1.006500244140625, |
|
"learning_rate": 8.661343332988869e-05, |
|
"loss": 0.8645, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.006543967280163599, |
|
"grad_norm": 0.842659592628479, |
|
"learning_rate": 8.633795680751116e-05, |
|
"loss": 0.8778, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.0066068900424728644, |
|
"grad_norm": 0.7573593854904175, |
|
"learning_rate": 8.606012236719073e-05, |
|
"loss": 0.8292, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.00666981280478213, |
|
"grad_norm": 0.8174044489860535, |
|
"learning_rate": 8.577994803720606e-05, |
|
"loss": 0.8189, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.006732735567091396, |
|
"grad_norm": 0.8135045766830444, |
|
"learning_rate": 8.549745199766792e-05, |
|
"loss": 0.7401, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.006795658329400661, |
|
"grad_norm": 0.8730856776237488, |
|
"learning_rate": 8.521265257933948e-05, |
|
"loss": 0.7662, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.006858581091709926, |
|
"grad_norm": 0.8787916898727417, |
|
"learning_rate": 8.492556826244687e-05, |
|
"loss": 0.8372, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.006921503854019191, |
|
"grad_norm": 0.7037457227706909, |
|
"learning_rate": 8.463621767547998e-05, |
|
"loss": 0.7454, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.006984426616328457, |
|
"grad_norm": 0.7457066774368286, |
|
"learning_rate": 8.434461959398376e-05, |
|
"loss": 0.7488, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.007047349378637722, |
|
"grad_norm": 0.7018367648124695, |
|
"learning_rate": 8.405079293933986e-05, |
|
"loss": 0.7122, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.0071102721409469875, |
|
"grad_norm": 0.7626819610595703, |
|
"learning_rate": 8.375475677753881e-05, |
|
"loss": 0.7963, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.007173194903256253, |
|
"grad_norm": 0.7396589517593384, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 0.8108, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.007236117665565519, |
|
"grad_norm": 0.7143003344535828, |
|
"learning_rate": 8.315613291203976e-05, |
|
"loss": 0.8086, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.007299040427874784, |
|
"grad_norm": 0.8025134205818176, |
|
"learning_rate": 8.285358405218655e-05, |
|
"loss": 0.7624, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.007361963190184049, |
|
"grad_norm": 0.7734766602516174, |
|
"learning_rate": 8.25489033703452e-05, |
|
"loss": 0.7575, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.007424885952493314, |
|
"grad_norm": 0.6909343600273132, |
|
"learning_rate": 8.224211063680853e-05, |
|
"loss": 0.7947, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.007487808714802579, |
|
"grad_norm": 0.7197896838188171, |
|
"learning_rate": 8.19332257589174e-05, |
|
"loss": 0.7929, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.0075507314771118455, |
|
"grad_norm": 0.7764743566513062, |
|
"learning_rate": 8.162226877976887e-05, |
|
"loss": 0.8279, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.007613654239421111, |
|
"grad_norm": 0.824350118637085, |
|
"learning_rate": 8.130925987691569e-05, |
|
"loss": 0.868, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.007676577001730376, |
|
"grad_norm": 0.7358341813087463, |
|
"learning_rate": 8.099421936105702e-05, |
|
"loss": 0.7942, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.007739499764039641, |
|
"grad_norm": 0.6394578218460083, |
|
"learning_rate": 8.067716767472045e-05, |
|
"loss": 0.766, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.007802422526348907, |
|
"grad_norm": 0.860275149345398, |
|
"learning_rate": 8.035812539093557e-05, |
|
"loss": 0.8043, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.007865345288658171, |
|
"grad_norm": 0.7427248358726501, |
|
"learning_rate": 8.003711321189895e-05, |
|
"loss": 0.7719, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.007928268050967437, |
|
"grad_norm": 0.7343852519989014, |
|
"learning_rate": 7.971415196763088e-05, |
|
"loss": 0.7928, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.007991190813276703, |
|
"grad_norm": 0.7235080003738403, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.753, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.008054113575585968, |
|
"grad_norm": 0.6687210202217102, |
|
"learning_rate": 7.906246623448183e-05, |
|
"loss": 0.738, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.008117036337895234, |
|
"grad_norm": 0.7192130088806152, |
|
"learning_rate": 7.873378403255419e-05, |
|
"loss": 0.7538, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.0081799591002045, |
|
"grad_norm": 0.7208861708641052, |
|
"learning_rate": 7.840323733655778e-05, |
|
"loss": 0.7941, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.008242881862513764, |
|
"grad_norm": 0.8294458985328674, |
|
"learning_rate": 7.807084759519405e-05, |
|
"loss": 0.7723, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.00830580462482303, |
|
"grad_norm": 0.7591488361358643, |
|
"learning_rate": 7.773663637675694e-05, |
|
"loss": 0.837, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.008368727387132294, |
|
"grad_norm": 0.6974380612373352, |
|
"learning_rate": 7.740062536773352e-05, |
|
"loss": 0.6939, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.00843165014944156, |
|
"grad_norm": 0.6800892353057861, |
|
"learning_rate": 7.706283637139658e-05, |
|
"loss": 0.8119, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.008494572911750826, |
|
"grad_norm": 0.7402788996696472, |
|
"learning_rate": 7.672329130639005e-05, |
|
"loss": 0.8032, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.00855749567406009, |
|
"grad_norm": 0.7551749348640442, |
|
"learning_rate": 7.638201220530665e-05, |
|
"loss": 0.7905, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.008620418436369357, |
|
"grad_norm": 0.6108826398849487, |
|
"learning_rate": 7.603902121325813e-05, |
|
"loss": 0.7494, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.008683341198678623, |
|
"grad_norm": 0.7117067575454712, |
|
"learning_rate": 7.569434058643844e-05, |
|
"loss": 0.7418, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.008746263960987887, |
|
"grad_norm": 0.6873885989189148, |
|
"learning_rate": 7.534799269067953e-05, |
|
"loss": 0.7915, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.008809186723297153, |
|
"grad_norm": 0.7516224384307861, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.7748, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.008872109485606417, |
|
"grad_norm": 0.724405825138092, |
|
"learning_rate": 7.465038509514688e-05, |
|
"loss": 0.8016, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.008935032247915684, |
|
"grad_norm": 0.7117589116096497, |
|
"learning_rate": 7.42991706621303e-05, |
|
"loss": 0.7227, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.00899795501022495, |
|
"grad_norm": 0.7572246193885803, |
|
"learning_rate": 7.394637949075154e-05, |
|
"loss": 0.7636, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.009060877772534214, |
|
"grad_norm": 0.7657356858253479, |
|
"learning_rate": 7.35920344731241e-05, |
|
"loss": 0.7762, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.00912380053484348, |
|
"grad_norm": 0.8046727776527405, |
|
"learning_rate": 7.323615860218843e-05, |
|
"loss": 0.7803, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.009186723297152744, |
|
"grad_norm": 0.7643633484840393, |
|
"learning_rate": 7.287877497021978e-05, |
|
"loss": 0.8089, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.00924964605946201, |
|
"grad_norm": 0.9501251578330994, |
|
"learning_rate": 7.251990676732984e-05, |
|
"loss": 0.828, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.009312568821771276, |
|
"grad_norm": 0.7604308724403381, |
|
"learning_rate": 7.215957727996207e-05, |
|
"loss": 0.7821, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.00937549158408054, |
|
"grad_norm": 0.8432756662368774, |
|
"learning_rate": 7.179780988938051e-05, |
|
"loss": 0.9183, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.009438414346389807, |
|
"grad_norm": 0.9484951496124268, |
|
"learning_rate": 7.143462807015271e-05, |
|
"loss": 0.8954, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.009438414346389807, |
|
"eval_loss": 0.8508211374282837, |
|
"eval_runtime": 1993.8307, |
|
"eval_samples_per_second": 13.425, |
|
"eval_steps_per_second": 3.356, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.009501337108699073, |
|
"grad_norm": 1.1908949613571167, |
|
"learning_rate": 7.107005538862646e-05, |
|
"loss": 0.8596, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.009564259871008337, |
|
"grad_norm": 1.1169883012771606, |
|
"learning_rate": 7.07041155014006e-05, |
|
"loss": 0.9571, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.009627182633317603, |
|
"grad_norm": 0.7980979681015015, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 0.7766, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.009690105395626867, |
|
"grad_norm": 0.7071455717086792, |
|
"learning_rate": 6.996822917828477e-05, |
|
"loss": 0.8339, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.009753028157936133, |
|
"grad_norm": 0.7221279144287109, |
|
"learning_rate": 6.959833049300377e-05, |
|
"loss": 0.7941, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0098159509202454, |
|
"grad_norm": 0.6975688934326172, |
|
"learning_rate": 6.922716010014255e-05, |
|
"loss": 0.7402, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.009878873682554664, |
|
"grad_norm": 0.6575108170509338, |
|
"learning_rate": 6.885474208441603e-05, |
|
"loss": 0.7297, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.00994179644486393, |
|
"grad_norm": 0.7444519996643066, |
|
"learning_rate": 6.848110061149556e-05, |
|
"loss": 0.7287, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.010004719207173196, |
|
"grad_norm": 0.7170855402946472, |
|
"learning_rate": 6.810625992644085e-05, |
|
"loss": 0.7978, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.01006764196948246, |
|
"grad_norm": 0.6473238468170166, |
|
"learning_rate": 6.773024435212678e-05, |
|
"loss": 0.7296, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.010130564731791726, |
|
"grad_norm": 0.6617797017097473, |
|
"learning_rate": 6.735307828766515e-05, |
|
"loss": 0.6883, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.01019348749410099, |
|
"grad_norm": 0.7170284390449524, |
|
"learning_rate": 6.697478620682137e-05, |
|
"loss": 0.7134, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.010256410256410256, |
|
"grad_norm": 0.653538167476654, |
|
"learning_rate": 6.659539265642643e-05, |
|
"loss": 0.7518, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.010319333018719522, |
|
"grad_norm": 0.6872601509094238, |
|
"learning_rate": 6.621492225478414e-05, |
|
"loss": 0.7379, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.010382255781028787, |
|
"grad_norm": 0.7626883387565613, |
|
"learning_rate": 6.583339969007363e-05, |
|
"loss": 0.7486, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.010445178543338053, |
|
"grad_norm": 0.7144574522972107, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.7396, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.010508101305647317, |
|
"grad_norm": 0.7148804664611816, |
|
"learning_rate": 6.506729716392481e-05, |
|
"loss": 0.7176, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.010571024067956583, |
|
"grad_norm": 0.7021417021751404, |
|
"learning_rate": 6.468276691378155e-05, |
|
"loss": 0.7104, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.01063394683026585, |
|
"grad_norm": 0.7372605204582214, |
|
"learning_rate": 6.429728391993446e-05, |
|
"loss": 0.8647, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.010696869592575113, |
|
"grad_norm": 0.7560912370681763, |
|
"learning_rate": 6.391087319582264e-05, |
|
"loss": 0.7326, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01075979235488438, |
|
"grad_norm": 0.6679472923278809, |
|
"learning_rate": 6.35235598150842e-05, |
|
"loss": 0.7417, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.010822715117193646, |
|
"grad_norm": 0.6532546281814575, |
|
"learning_rate": 6.313536890992935e-05, |
|
"loss": 0.6867, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.01088563787950291, |
|
"grad_norm": 0.6303436160087585, |
|
"learning_rate": 6.274632566950967e-05, |
|
"loss": 0.7392, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.010948560641812176, |
|
"grad_norm": 0.6903069019317627, |
|
"learning_rate": 6.235645533828349e-05, |
|
"loss": 0.7651, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.01101148340412144, |
|
"grad_norm": 0.6938333511352539, |
|
"learning_rate": 6.19657832143779e-05, |
|
"loss": 0.6829, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.011074406166430706, |
|
"grad_norm": 0.6610081791877747, |
|
"learning_rate": 6.157433464794716e-05, |
|
"loss": 0.7268, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.011137328928739972, |
|
"grad_norm": 0.6725570559501648, |
|
"learning_rate": 6.118213503952779e-05, |
|
"loss": 0.7439, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.011200251691049237, |
|
"grad_norm": 0.6829043626785278, |
|
"learning_rate": 6.078920983839031e-05, |
|
"loss": 0.7467, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.011263174453358503, |
|
"grad_norm": 0.6502189040184021, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 0.7015, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.011326097215667769, |
|
"grad_norm": 0.661758303642273, |
|
"learning_rate": 6.0001284688802226e-05, |
|
"loss": 0.7296, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.011389019977977033, |
|
"grad_norm": 0.7520274519920349, |
|
"learning_rate": 5.960633586768543e-05, |
|
"loss": 0.7552, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.011451942740286299, |
|
"grad_norm": 0.7441068291664124, |
|
"learning_rate": 5.921076370520058e-05, |
|
"loss": 0.7236, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.011514865502595563, |
|
"grad_norm": 0.6942856311798096, |
|
"learning_rate": 5.8814593869458455e-05, |
|
"loss": 0.7201, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.01157778826490483, |
|
"grad_norm": 0.6812763810157776, |
|
"learning_rate": 5.841785206735192e-05, |
|
"loss": 0.7475, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.011640711027214095, |
|
"grad_norm": 0.6556635499000549, |
|
"learning_rate": 5.8020564042888015e-05, |
|
"loss": 0.8007, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.01170363378952336, |
|
"grad_norm": 0.684214174747467, |
|
"learning_rate": 5.762275557551727e-05, |
|
"loss": 0.7799, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.011766556551832626, |
|
"grad_norm": 0.7426326870918274, |
|
"learning_rate": 5.7224452478461064e-05, |
|
"loss": 0.8322, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.01182947931414189, |
|
"grad_norm": 0.681393027305603, |
|
"learning_rate": 5.682568059703659e-05, |
|
"loss": 0.7452, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.011892402076451156, |
|
"grad_norm": 0.6602080464363098, |
|
"learning_rate": 5.642646580697973e-05, |
|
"loss": 0.764, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.011955324838760422, |
|
"grad_norm": 0.6899963021278381, |
|
"learning_rate": 5.602683401276615e-05, |
|
"loss": 0.7555, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.012018247601069686, |
|
"grad_norm": 0.5832464098930359, |
|
"learning_rate": 5.562681114593028e-05, |
|
"loss": 0.7499, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.012081170363378952, |
|
"grad_norm": 0.6253865957260132, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 0.7261, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.012144093125688218, |
|
"grad_norm": 0.7363515496253967, |
|
"learning_rate": 5.482569604572576e-05, |
|
"loss": 0.7222, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.012207015887997483, |
|
"grad_norm": 0.7669686079025269, |
|
"learning_rate": 5.442465579556793e-05, |
|
"loss": 0.7768, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.012269938650306749, |
|
"grad_norm": 0.6593098640441895, |
|
"learning_rate": 5.402332843583631e-05, |
|
"loss": 0.7953, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.012332861412616013, |
|
"grad_norm": 0.6666168570518494, |
|
"learning_rate": 5.3621740008088126e-05, |
|
"loss": 0.7774, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.012395784174925279, |
|
"grad_norm": 0.7173077464103699, |
|
"learning_rate": 5.321991657082097e-05, |
|
"loss": 0.7483, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.012458706937234545, |
|
"grad_norm": 0.7566263675689697, |
|
"learning_rate": 5.281788419778187e-05, |
|
"loss": 0.7738, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.01252162969954381, |
|
"grad_norm": 0.7862094044685364, |
|
"learning_rate": 5.2415668976275355e-05, |
|
"loss": 0.815, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.012584552461853075, |
|
"grad_norm": 0.9621549844741821, |
|
"learning_rate": 5.201329700547076e-05, |
|
"loss": 0.8267, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.012584552461853075, |
|
"eval_loss": 0.8121484518051147, |
|
"eval_runtime": 1992.7213, |
|
"eval_samples_per_second": 13.432, |
|
"eval_steps_per_second": 3.358, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.012647475224162342, |
|
"grad_norm": 1.1414529085159302, |
|
"learning_rate": 5.161079439470866e-05, |
|
"loss": 0.8951, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.012710397986471606, |
|
"grad_norm": 0.9856881499290466, |
|
"learning_rate": 5.1208187261806615e-05, |
|
"loss": 0.8464, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.012773320748780872, |
|
"grad_norm": 0.7981590628623962, |
|
"learning_rate": 5.080550173136457e-05, |
|
"loss": 0.7748, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.012836243511090136, |
|
"grad_norm": 0.7538502216339111, |
|
"learning_rate": 5.0402763933069496e-05, |
|
"loss": 0.7969, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.012899166273399402, |
|
"grad_norm": 0.6282597184181213, |
|
"learning_rate": 5e-05, |
|
"loss": 0.7767, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.012962089035708668, |
|
"grad_norm": 0.5800930261611938, |
|
"learning_rate": 4.9597236066930516e-05, |
|
"loss": 0.7049, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.013025011798017933, |
|
"grad_norm": 0.5951983332633972, |
|
"learning_rate": 4.919449826863544e-05, |
|
"loss": 0.7179, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.013087934560327199, |
|
"grad_norm": 0.6343929171562195, |
|
"learning_rate": 4.87918127381934e-05, |
|
"loss": 0.7377, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.013150857322636465, |
|
"grad_norm": 0.6349851489067078, |
|
"learning_rate": 4.8389205605291365e-05, |
|
"loss": 0.6433, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.013213780084945729, |
|
"grad_norm": 0.6455802321434021, |
|
"learning_rate": 4.798670299452926e-05, |
|
"loss": 0.7163, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.013276702847254995, |
|
"grad_norm": 0.6890186071395874, |
|
"learning_rate": 4.758433102372466e-05, |
|
"loss": 0.7404, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.01333962560956426, |
|
"grad_norm": 0.6462453007698059, |
|
"learning_rate": 4.7182115802218126e-05, |
|
"loss": 0.7984, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.013402548371873525, |
|
"grad_norm": 0.6188192963600159, |
|
"learning_rate": 4.678008342917903e-05, |
|
"loss": 0.7616, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.013465471134182791, |
|
"grad_norm": 0.7416461706161499, |
|
"learning_rate": 4.6378259991911886e-05, |
|
"loss": 0.7367, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.013528393896492056, |
|
"grad_norm": 0.5736991167068481, |
|
"learning_rate": 4.597667156416371e-05, |
|
"loss": 0.6972, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.013591316658801322, |
|
"grad_norm": 0.6165981292724609, |
|
"learning_rate": 4.5575344204432084e-05, |
|
"loss": 0.7462, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.013654239421110586, |
|
"grad_norm": 0.6969764828681946, |
|
"learning_rate": 4.5174303954274244e-05, |
|
"loss": 0.7823, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.013717162183419852, |
|
"grad_norm": 0.6673752069473267, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 0.7428, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.013780084945729118, |
|
"grad_norm": 0.6823404431343079, |
|
"learning_rate": 4.437318885406973e-05, |
|
"loss": 0.7402, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.013843007708038382, |
|
"grad_norm": 0.6113579273223877, |
|
"learning_rate": 4.397316598723385e-05, |
|
"loss": 0.7455, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.013905930470347648, |
|
"grad_norm": 0.6530716419219971, |
|
"learning_rate": 4.3573534193020274e-05, |
|
"loss": 0.7278, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.013968853232656914, |
|
"grad_norm": 0.6049585342407227, |
|
"learning_rate": 4.317431940296343e-05, |
|
"loss": 0.698, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.014031775994966179, |
|
"grad_norm": 0.5957211256027222, |
|
"learning_rate": 4.277554752153895e-05, |
|
"loss": 0.692, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.014094698757275445, |
|
"grad_norm": 0.6721015572547913, |
|
"learning_rate": 4.237724442448273e-05, |
|
"loss": 0.7595, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.014157621519584709, |
|
"grad_norm": 0.6083941459655762, |
|
"learning_rate": 4.197943595711198e-05, |
|
"loss": 0.7301, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.014220544281893975, |
|
"grad_norm": 0.6055333614349365, |
|
"learning_rate": 4.1582147932648074e-05, |
|
"loss": 0.673, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.014283467044203241, |
|
"grad_norm": 0.6080695390701294, |
|
"learning_rate": 4.118540613054156e-05, |
|
"loss": 0.7346, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.014346389806512505, |
|
"grad_norm": 0.6798868179321289, |
|
"learning_rate": 4.078923629479943e-05, |
|
"loss": 0.7636, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.014409312568821771, |
|
"grad_norm": 0.6028340458869934, |
|
"learning_rate": 4.039366413231458e-05, |
|
"loss": 0.7008, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.014472235331131037, |
|
"grad_norm": 0.6678457856178284, |
|
"learning_rate": 3.9998715311197785e-05, |
|
"loss": 0.7236, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.014535158093440302, |
|
"grad_norm": 0.6005433201789856, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 0.6898, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.014598080855749568, |
|
"grad_norm": 0.6036130785942078, |
|
"learning_rate": 3.92107901616097e-05, |
|
"loss": 0.7337, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.014661003618058832, |
|
"grad_norm": 0.6133017539978027, |
|
"learning_rate": 3.8817864960472236e-05, |
|
"loss": 0.7035, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.014723926380368098, |
|
"grad_norm": 0.6579994559288025, |
|
"learning_rate": 3.842566535205286e-05, |
|
"loss": 0.7428, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.014786849142677364, |
|
"grad_norm": 0.6396896839141846, |
|
"learning_rate": 3.803421678562213e-05, |
|
"loss": 0.7253, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.014849771904986628, |
|
"grad_norm": 0.5843393206596375, |
|
"learning_rate": 3.764354466171652e-05, |
|
"loss": 0.6655, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.014912694667295895, |
|
"grad_norm": 0.732956051826477, |
|
"learning_rate": 3.725367433049033e-05, |
|
"loss": 0.7606, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.014975617429605159, |
|
"grad_norm": 0.5860475897789001, |
|
"learning_rate": 3.6864631090070655e-05, |
|
"loss": 0.6058, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.015038540191914425, |
|
"grad_norm": 0.6364427208900452, |
|
"learning_rate": 3.6476440184915815e-05, |
|
"loss": 0.7429, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.015101462954223691, |
|
"grad_norm": 0.7146442532539368, |
|
"learning_rate": 3.608912680417737e-05, |
|
"loss": 0.7055, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.015164385716532955, |
|
"grad_norm": 0.8253147602081299, |
|
"learning_rate": 3.570271608006555e-05, |
|
"loss": 0.7516, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.015227308478842221, |
|
"grad_norm": 0.6781508326530457, |
|
"learning_rate": 3.531723308621847e-05, |
|
"loss": 0.6859, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.015290231241151487, |
|
"grad_norm": 0.7546252012252808, |
|
"learning_rate": 3.493270283607522e-05, |
|
"loss": 0.7951, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.015353154003460752, |
|
"grad_norm": 0.6175801753997803, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.7232, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.015416076765770018, |
|
"grad_norm": 0.7104986906051636, |
|
"learning_rate": 3.4166600309926387e-05, |
|
"loss": 0.7164, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.015478999528079282, |
|
"grad_norm": 0.761751115322113, |
|
"learning_rate": 3.3785077745215873e-05, |
|
"loss": 0.7805, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.015541922290388548, |
|
"grad_norm": 0.7329543232917786, |
|
"learning_rate": 3.340460734357359e-05, |
|
"loss": 0.7607, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.015604845052697814, |
|
"grad_norm": 0.8153419494628906, |
|
"learning_rate": 3.3025213793178646e-05, |
|
"loss": 0.8694, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.01566776781500708, |
|
"grad_norm": 0.8363320231437683, |
|
"learning_rate": 3.264692171233485e-05, |
|
"loss": 0.8512, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.015730690577316343, |
|
"grad_norm": 0.8638584613800049, |
|
"learning_rate": 3.226975564787322e-05, |
|
"loss": 0.9056, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.015730690577316343, |
|
"eval_loss": 0.7725260853767395, |
|
"eval_runtime": 1991.309, |
|
"eval_samples_per_second": 13.442, |
|
"eval_steps_per_second": 3.361, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01579361333962561, |
|
"grad_norm": 0.9017981290817261, |
|
"learning_rate": 3.189374007355917e-05, |
|
"loss": 0.7322, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.015856536101934875, |
|
"grad_norm": 0.784041166305542, |
|
"learning_rate": 3.151889938850445e-05, |
|
"loss": 0.7285, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.01591945886424414, |
|
"grad_norm": 0.8192543387413025, |
|
"learning_rate": 3.114525791558398e-05, |
|
"loss": 0.7494, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.015982381626553407, |
|
"grad_norm": 0.7851762175559998, |
|
"learning_rate": 3.0772839899857464e-05, |
|
"loss": 0.8101, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.01604530438886267, |
|
"grad_norm": 0.6676402688026428, |
|
"learning_rate": 3.0401669506996256e-05, |
|
"loss": 0.7238, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.016108227151171935, |
|
"grad_norm": 0.6430277824401855, |
|
"learning_rate": 3.003177082171523e-05, |
|
"loss": 0.7751, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.016171149913481203, |
|
"grad_norm": 0.6109689474105835, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 0.7016, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.016234072675790467, |
|
"grad_norm": 0.6223710775375366, |
|
"learning_rate": 2.9295884498599414e-05, |
|
"loss": 0.7558, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.01629699543809973, |
|
"grad_norm": 0.6088758707046509, |
|
"learning_rate": 2.8929944611373554e-05, |
|
"loss": 0.7324, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.016359918200409, |
|
"grad_norm": 0.5436838865280151, |
|
"learning_rate": 2.8565371929847284e-05, |
|
"loss": 0.6773, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.016422840962718264, |
|
"grad_norm": 0.5857248306274414, |
|
"learning_rate": 2.8202190110619493e-05, |
|
"loss": 0.7294, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.016485763725027528, |
|
"grad_norm": 0.5640518069267273, |
|
"learning_rate": 2.784042272003794e-05, |
|
"loss": 0.709, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.016548686487336792, |
|
"grad_norm": 0.6003010869026184, |
|
"learning_rate": 2.7480093232670158e-05, |
|
"loss": 0.8003, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.01661160924964606, |
|
"grad_norm": 0.6455116868019104, |
|
"learning_rate": 2.712122502978024e-05, |
|
"loss": 0.7943, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.016674532011955324, |
|
"grad_norm": 0.5922764539718628, |
|
"learning_rate": 2.6763841397811573e-05, |
|
"loss": 0.723, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.01673745477426459, |
|
"grad_norm": 0.5494192838668823, |
|
"learning_rate": 2.64079655268759e-05, |
|
"loss": 0.7551, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.016800377536573857, |
|
"grad_norm": 0.6045573949813843, |
|
"learning_rate": 2.605362050924848e-05, |
|
"loss": 0.836, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.01686330029888312, |
|
"grad_norm": 0.601265549659729, |
|
"learning_rate": 2.57008293378697e-05, |
|
"loss": 0.7308, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.016926223061192385, |
|
"grad_norm": 0.6050641536712646, |
|
"learning_rate": 2.534961490485313e-05, |
|
"loss": 0.7363, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.016989145823501653, |
|
"grad_norm": 0.5850335955619812, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.6857, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.017052068585810917, |
|
"grad_norm": 0.5859873294830322, |
|
"learning_rate": 2.4652007309320498e-05, |
|
"loss": 0.738, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.01711499134812018, |
|
"grad_norm": 0.5759584307670593, |
|
"learning_rate": 2.430565941356157e-05, |
|
"loss": 0.7003, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.01717791411042945, |
|
"grad_norm": 0.568520724773407, |
|
"learning_rate": 2.3960978786741877e-05, |
|
"loss": 0.6084, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.017240836872738714, |
|
"grad_norm": 0.5545433759689331, |
|
"learning_rate": 2.361798779469336e-05, |
|
"loss": 0.6588, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.017303759635047978, |
|
"grad_norm": 0.5918384194374084, |
|
"learning_rate": 2.3276708693609943e-05, |
|
"loss": 0.7023, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.017366682397357246, |
|
"grad_norm": 0.5700395107269287, |
|
"learning_rate": 2.2937163628603435e-05, |
|
"loss": 0.738, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.01742960515966651, |
|
"grad_norm": 0.5376745462417603, |
|
"learning_rate": 2.259937463226651e-05, |
|
"loss": 0.7345, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.017492527921975774, |
|
"grad_norm": 0.5538782477378845, |
|
"learning_rate": 2.2263363623243054e-05, |
|
"loss": 0.7037, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.01755545068428504, |
|
"grad_norm": 0.5734386444091797, |
|
"learning_rate": 2.192915240480596e-05, |
|
"loss": 0.6757, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.017618373446594306, |
|
"grad_norm": 0.6654324531555176, |
|
"learning_rate": 2.1596762663442218e-05, |
|
"loss": 0.7392, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01768129620890357, |
|
"grad_norm": 0.6778509020805359, |
|
"learning_rate": 2.1266215967445824e-05, |
|
"loss": 0.7742, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.017744218971212835, |
|
"grad_norm": 0.5633755326271057, |
|
"learning_rate": 2.0937533765518187e-05, |
|
"loss": 0.6952, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.017807141733522103, |
|
"grad_norm": 0.6291466951370239, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.691, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.017870064495831367, |
|
"grad_norm": 0.6446377038955688, |
|
"learning_rate": 2.0285848032369137e-05, |
|
"loss": 0.7298, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.01793298725814063, |
|
"grad_norm": 0.6178613901138306, |
|
"learning_rate": 1.996288678810105e-05, |
|
"loss": 0.6924, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.0179959100204499, |
|
"grad_norm": 0.5817842483520508, |
|
"learning_rate": 1.9641874609064443e-05, |
|
"loss": 0.6699, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.018058832782759163, |
|
"grad_norm": 0.6187658309936523, |
|
"learning_rate": 1.932283232527956e-05, |
|
"loss": 0.7599, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.018121755545068428, |
|
"grad_norm": 0.594488799571991, |
|
"learning_rate": 1.9005780638942982e-05, |
|
"loss": 0.7158, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.018184678307377695, |
|
"grad_norm": 0.6769745945930481, |
|
"learning_rate": 1.8690740123084316e-05, |
|
"loss": 0.7599, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.01824760106968696, |
|
"grad_norm": 0.6411002278327942, |
|
"learning_rate": 1.837773122023114e-05, |
|
"loss": 0.7427, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.018310523831996224, |
|
"grad_norm": 0.6521747708320618, |
|
"learning_rate": 1.8066774241082612e-05, |
|
"loss": 0.7293, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.01837344659430549, |
|
"grad_norm": 0.6916580200195312, |
|
"learning_rate": 1.7757889363191483e-05, |
|
"loss": 0.7342, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.018436369356614756, |
|
"grad_norm": 0.6757326722145081, |
|
"learning_rate": 1.745109662965481e-05, |
|
"loss": 0.7279, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.01849929211892402, |
|
"grad_norm": 0.6940987706184387, |
|
"learning_rate": 1.714641594781347e-05, |
|
"loss": 0.7043, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.018562214881233285, |
|
"grad_norm": 0.733060896396637, |
|
"learning_rate": 1.684386708796025e-05, |
|
"loss": 0.7816, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.018625137643542553, |
|
"grad_norm": 0.6440508961677551, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.7928, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.018688060405851817, |
|
"grad_norm": 0.7244382500648499, |
|
"learning_rate": 1.62452432224612e-05, |
|
"loss": 0.7793, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.01875098316816108, |
|
"grad_norm": 0.7814667224884033, |
|
"learning_rate": 1.5949207060660138e-05, |
|
"loss": 0.7547, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.01881390593047035, |
|
"grad_norm": 0.8025712370872498, |
|
"learning_rate": 1.5655380406016235e-05, |
|
"loss": 0.8875, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.018876828692779613, |
|
"grad_norm": 0.7935718894004822, |
|
"learning_rate": 1.536378232452003e-05, |
|
"loss": 0.8375, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.018876828692779613, |
|
"eval_loss": 0.7229760885238647, |
|
"eval_runtime": 1993.5614, |
|
"eval_samples_per_second": 13.427, |
|
"eval_steps_per_second": 3.357, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.018939751455088878, |
|
"grad_norm": 0.6894450783729553, |
|
"learning_rate": 1.5074431737553157e-05, |
|
"loss": 0.7444, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.019002674217398145, |
|
"grad_norm": 0.6789616942405701, |
|
"learning_rate": 1.4787347420660541e-05, |
|
"loss": 0.7414, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.01906559697970741, |
|
"grad_norm": 0.7387858033180237, |
|
"learning_rate": 1.4502548002332088e-05, |
|
"loss": 0.7901, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.019128519742016674, |
|
"grad_norm": 0.6375578045845032, |
|
"learning_rate": 1.422005196279395e-05, |
|
"loss": 0.6224, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.019191442504325938, |
|
"grad_norm": 0.7267337441444397, |
|
"learning_rate": 1.3939877632809278e-05, |
|
"loss": 0.7372, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.019254365266635206, |
|
"grad_norm": 0.6559906601905823, |
|
"learning_rate": 1.3662043192488849e-05, |
|
"loss": 0.7395, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.01931728802894447, |
|
"grad_norm": 0.6069934964179993, |
|
"learning_rate": 1.338656667011134e-05, |
|
"loss": 0.7066, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.019380210791253735, |
|
"grad_norm": 0.6166743040084839, |
|
"learning_rate": 1.3113465940953495e-05, |
|
"loss": 0.7179, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.019443133553563002, |
|
"grad_norm": 0.7378328442573547, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.7793, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.019506056315872267, |
|
"grad_norm": 0.6443124413490295, |
|
"learning_rate": 1.257446259144494e-05, |
|
"loss": 0.7157, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.01956897907818153, |
|
"grad_norm": 0.5678501129150391, |
|
"learning_rate": 1.2308594946249163e-05, |
|
"loss": 0.7045, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.0196319018404908, |
|
"grad_norm": 0.6501772403717041, |
|
"learning_rate": 1.204517304231343e-05, |
|
"loss": 0.7822, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.019694824602800063, |
|
"grad_norm": 0.5607977509498596, |
|
"learning_rate": 1.178421397270758e-05, |
|
"loss": 0.6364, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.019757747365109327, |
|
"grad_norm": 0.6143155694007874, |
|
"learning_rate": 1.1525734670691701e-05, |
|
"loss": 0.7455, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.019820670127418595, |
|
"grad_norm": 0.5648209452629089, |
|
"learning_rate": 1.1269751908617277e-05, |
|
"loss": 0.6528, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.01988359288972786, |
|
"grad_norm": 0.5944265723228455, |
|
"learning_rate": 1.1016282296838887e-05, |
|
"loss": 0.7613, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.019946515652037124, |
|
"grad_norm": 0.643968403339386, |
|
"learning_rate": 1.0765342282636416e-05, |
|
"loss": 0.7188, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.02000943841434639, |
|
"grad_norm": 0.5592343211174011, |
|
"learning_rate": 1.0516948149147754e-05, |
|
"loss": 0.7049, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.020072361176655656, |
|
"grad_norm": 0.5851125717163086, |
|
"learning_rate": 1.0271116014312293e-05, |
|
"loss": 0.684, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.02013528393896492, |
|
"grad_norm": 0.6031101942062378, |
|
"learning_rate": 1.0027861829824952e-05, |
|
"loss": 0.7494, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.020198206701274184, |
|
"grad_norm": 0.5463895201683044, |
|
"learning_rate": 9.787201380101157e-06, |
|
"loss": 0.6859, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.020261129463583452, |
|
"grad_norm": 0.5959100723266602, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.7081, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.020324052225892716, |
|
"grad_norm": 0.6021979451179504, |
|
"learning_rate": 9.313723980074018e-06, |
|
"loss": 0.7436, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.02038697498820198, |
|
"grad_norm": 0.5806286931037903, |
|
"learning_rate": 9.080937753040646e-06, |
|
"loss": 0.6912, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.02044989775051125, |
|
"grad_norm": 0.5796635150909424, |
|
"learning_rate": 8.850806705317183e-06, |
|
"loss": 0.6992, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.020512820512820513, |
|
"grad_norm": 0.5652114152908325, |
|
"learning_rate": 8.623345769777514e-06, |
|
"loss": 0.64, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.020575743275129777, |
|
"grad_norm": 0.5946077704429626, |
|
"learning_rate": 8.398569706035792e-06, |
|
"loss": 0.6805, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.020638666037439045, |
|
"grad_norm": 0.5399512052536011, |
|
"learning_rate": 8.176493099488663e-06, |
|
"loss": 0.6716, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.02070158879974831, |
|
"grad_norm": 0.5655412673950195, |
|
"learning_rate": 7.957130360368898e-06, |
|
"loss": 0.6901, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.020764511562057573, |
|
"grad_norm": 0.5360169410705566, |
|
"learning_rate": 7.740495722810271e-06, |
|
"loss": 0.6565, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.02082743432436684, |
|
"grad_norm": 0.5519473552703857, |
|
"learning_rate": 7.526603243923957e-06, |
|
"loss": 0.6759, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.020890357086676106, |
|
"grad_norm": 0.6575800180435181, |
|
"learning_rate": 7.315466802886401e-06, |
|
"loss": 0.7317, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.02095327984898537, |
|
"grad_norm": 0.5860657691955566, |
|
"learning_rate": 7.107100100038671e-06, |
|
"loss": 0.7099, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.021016202611294634, |
|
"grad_norm": 0.5698612332344055, |
|
"learning_rate": 6.901516655997536e-06, |
|
"loss": 0.6872, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.021079125373603902, |
|
"grad_norm": 0.6097320914268494, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.6447, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.021142048135913166, |
|
"grad_norm": 0.645949125289917, |
|
"learning_rate": 6.498752722928042e-06, |
|
"loss": 0.742, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.02120497089822243, |
|
"grad_norm": 0.6028321385383606, |
|
"learning_rate": 6.301598368674105e-06, |
|
"loss": 0.677, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.0212678936605317, |
|
"grad_norm": 0.6465116143226624, |
|
"learning_rate": 6.107279541079769e-06, |
|
"loss": 0.7062, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.021330816422840963, |
|
"grad_norm": 0.5715640187263489, |
|
"learning_rate": 5.915808849215304e-06, |
|
"loss": 0.6684, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.021393739185150227, |
|
"grad_norm": 0.6341311931610107, |
|
"learning_rate": 5.727198717339511e-06, |
|
"loss": 0.7215, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.021456661947459495, |
|
"grad_norm": 0.5924866795539856, |
|
"learning_rate": 5.54146138409355e-06, |
|
"loss": 0.6658, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.02151958470976876, |
|
"grad_norm": 0.6136569380760193, |
|
"learning_rate": 5.358608901706802e-06, |
|
"loss": 0.7325, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.021582507472078023, |
|
"grad_norm": 0.6864028573036194, |
|
"learning_rate": 5.178653135214812e-06, |
|
"loss": 0.7257, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.02164543023438729, |
|
"grad_norm": 0.7612612247467041, |
|
"learning_rate": 5.001605761689398e-06, |
|
"loss": 0.7581, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.021708352996696555, |
|
"grad_norm": 0.6227344870567322, |
|
"learning_rate": 4.827478269480895e-06, |
|
"loss": 0.627, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.02177127575900582, |
|
"grad_norm": 0.7695668339729309, |
|
"learning_rate": 4.65628195747273e-06, |
|
"loss": 0.7749, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.021834198521315087, |
|
"grad_norm": 0.7633891105651855, |
|
"learning_rate": 4.488027934348271e-06, |
|
"loss": 0.7784, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.02189712128362435, |
|
"grad_norm": 0.7154384851455688, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.7125, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.021960044045933616, |
|
"grad_norm": 0.831569492816925, |
|
"learning_rate": 4.16039023417088e-06, |
|
"loss": 0.7757, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.02202296680824288, |
|
"grad_norm": 1.0594757795333862, |
|
"learning_rate": 4.001027817058789e-06, |
|
"loss": 0.8293, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02202296680824288, |
|
"eval_loss": 0.6978302001953125, |
|
"eval_runtime": 1993.2949, |
|
"eval_samples_per_second": 13.429, |
|
"eval_steps_per_second": 3.357, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.022085889570552148, |
|
"grad_norm": 0.6362000703811646, |
|
"learning_rate": 3.844650207332562e-06, |
|
"loss": 0.6765, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.022148812332861412, |
|
"grad_norm": 0.5263391733169556, |
|
"learning_rate": 3.691267552111183e-06, |
|
"loss": 0.6235, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.022211735095170677, |
|
"grad_norm": 0.5333245992660522, |
|
"learning_rate": 3.54088980417534e-06, |
|
"loss": 0.6275, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.022274657857479944, |
|
"grad_norm": 0.6024055480957031, |
|
"learning_rate": 3.393526721321616e-06, |
|
"loss": 0.6344, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.02233758061978921, |
|
"grad_norm": 0.56202632188797, |
|
"learning_rate": 3.249187865729264e-06, |
|
"loss": 0.667, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.022400503382098473, |
|
"grad_norm": 0.6126339435577393, |
|
"learning_rate": 3.1078826033397843e-06, |
|
"loss": 0.7108, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.02246342614440774, |
|
"grad_norm": 0.5996975302696228, |
|
"learning_rate": 2.9696201032491434e-06, |
|
"loss": 0.6846, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.022526348906717005, |
|
"grad_norm": 0.6046069264411926, |
|
"learning_rate": 2.8344093371128424e-06, |
|
"loss": 0.6398, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.02258927166902627, |
|
"grad_norm": 0.5685755014419556, |
|
"learning_rate": 2.70225907856374e-06, |
|
"loss": 0.5936, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.022652194431335537, |
|
"grad_norm": 0.5690946578979492, |
|
"learning_rate": 2.573177902642726e-06, |
|
"loss": 0.6451, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.0227151171936448, |
|
"grad_norm": 0.6844279170036316, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.7307, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.022778039955954066, |
|
"grad_norm": 0.5816213488578796, |
|
"learning_rate": 2.324256102563188e-06, |
|
"loss": 0.6763, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.02284096271826333, |
|
"grad_norm": 0.6306328177452087, |
|
"learning_rate": 2.204431630583548e-06, |
|
"loss": 0.7201, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.022903885480572598, |
|
"grad_norm": 0.5840525031089783, |
|
"learning_rate": 2.087708544541689e-06, |
|
"loss": 0.6618, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.022966808242881862, |
|
"grad_norm": 0.5952986478805542, |
|
"learning_rate": 1.974094418431388e-06, |
|
"loss": 0.7408, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.023029731005191127, |
|
"grad_norm": 0.6626343131065369, |
|
"learning_rate": 1.8635966245104664e-06, |
|
"loss": 0.7118, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.023092653767500394, |
|
"grad_norm": 0.561195433139801, |
|
"learning_rate": 1.7562223328224325e-06, |
|
"loss": 0.6572, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.02315557652980966, |
|
"grad_norm": 0.605522632598877, |
|
"learning_rate": 1.6519785107311891e-06, |
|
"loss": 0.6959, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.023218499292118923, |
|
"grad_norm": 0.6060965657234192, |
|
"learning_rate": 1.5508719224689717e-06, |
|
"loss": 0.7534, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.02328142205442819, |
|
"grad_norm": 0.5786964893341064, |
|
"learning_rate": 1.4529091286973995e-06, |
|
"loss": 0.6837, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.023344344816737455, |
|
"grad_norm": 0.554511308670044, |
|
"learning_rate": 1.358096486081778e-06, |
|
"loss": 0.6633, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.02340726757904672, |
|
"grad_norm": 0.561881422996521, |
|
"learning_rate": 1.2664401468786114e-06, |
|
"loss": 0.6633, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.023470190341355987, |
|
"grad_norm": 0.6631070971488953, |
|
"learning_rate": 1.1779460585363944e-06, |
|
"loss": 0.7833, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.02353311310366525, |
|
"grad_norm": 0.5902887582778931, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 0.7127, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.023596035865974516, |
|
"grad_norm": 0.5488584041595459, |
|
"learning_rate": 1.0104673978866164e-06, |
|
"loss": 0.65, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.02365895862828378, |
|
"grad_norm": 0.549534022808075, |
|
"learning_rate": 9.314936930293283e-07, |
|
"loss": 0.649, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.023721881390593048, |
|
"grad_norm": 0.6492491364479065, |
|
"learning_rate": 8.557039732283944e-07, |
|
"loss": 0.7449, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.023784804152902312, |
|
"grad_norm": 0.603671669960022, |
|
"learning_rate": 7.83103156370113e-07, |
|
"loss": 0.6647, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.023847726915211576, |
|
"grad_norm": 0.60027676820755, |
|
"learning_rate": 7.136959534174592e-07, |
|
"loss": 0.6557, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.023910649677520844, |
|
"grad_norm": 0.5764393210411072, |
|
"learning_rate": 6.474868681043578e-07, |
|
"loss": 0.6449, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02397357243983011, |
|
"grad_norm": 0.6350451707839966, |
|
"learning_rate": 5.844801966434832e-07, |
|
"loss": 0.7452, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.024036495202139373, |
|
"grad_norm": 0.5474420189857483, |
|
"learning_rate": 5.246800274474439e-07, |
|
"loss": 0.6484, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.02409941796444864, |
|
"grad_norm": 0.6028091907501221, |
|
"learning_rate": 4.680902408635335e-07, |
|
"loss": 0.7082, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.024162340726757905, |
|
"grad_norm": 0.6048222184181213, |
|
"learning_rate": 4.1471450892189846e-07, |
|
"loss": 0.7395, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.02422526348906717, |
|
"grad_norm": 0.6276510953903198, |
|
"learning_rate": 3.6455629509730136e-07, |
|
"loss": 0.6713, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.024288186251376437, |
|
"grad_norm": 0.5658819675445557, |
|
"learning_rate": 3.1761885408435054e-07, |
|
"loss": 0.6423, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.0243511090136857, |
|
"grad_norm": 0.6045307517051697, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 0.6774, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.024414031775994965, |
|
"grad_norm": 0.589821457862854, |
|
"learning_rate": 2.334182641175686e-07, |
|
"loss": 0.6838, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.024476954538304233, |
|
"grad_norm": 0.5758050680160522, |
|
"learning_rate": 1.9616057881935436e-07, |
|
"loss": 0.704, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.024539877300613498, |
|
"grad_norm": 0.619819700717926, |
|
"learning_rate": 1.6213459328950352e-07, |
|
"loss": 0.658, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.024602800062922762, |
|
"grad_norm": 0.6557899713516235, |
|
"learning_rate": 1.3134251542544774e-07, |
|
"loss": 0.6725, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.024665722825232026, |
|
"grad_norm": 0.6236681342124939, |
|
"learning_rate": 1.0378634328099269e-07, |
|
"loss": 0.6717, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.024728645587541294, |
|
"grad_norm": 0.6441242098808289, |
|
"learning_rate": 7.946786493666647e-08, |
|
"loss": 0.7222, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.024791568349850558, |
|
"grad_norm": 0.6634582281112671, |
|
"learning_rate": 5.838865838366792e-08, |
|
"loss": 0.7297, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.024854491112159822, |
|
"grad_norm": 0.6949135661125183, |
|
"learning_rate": 4.055009142152067e-08, |
|
"loss": 0.6816, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.02491741387446909, |
|
"grad_norm": 0.7237213850021362, |
|
"learning_rate": 2.595332156925534e-08, |
|
"loss": 0.6885, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.024980336636778355, |
|
"grad_norm": 0.7857411503791809, |
|
"learning_rate": 1.4599295990352924e-08, |
|
"loss": 0.767, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.02504325939908762, |
|
"grad_norm": 0.8778714537620544, |
|
"learning_rate": 6.488751431266149e-09, |
|
"loss": 0.7826, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.025106182161396887, |
|
"grad_norm": 0.8500857353210449, |
|
"learning_rate": 1.622214173602199e-09, |
|
"loss": 0.7738, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.02516910492370615, |
|
"grad_norm": 0.9884042143821716, |
|
"learning_rate": 0.0, |
|
"loss": 0.8359, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.02516910492370615, |
|
"eval_loss": 0.6945011615753174, |
|
"eval_runtime": 1990.894, |
|
"eval_samples_per_second": 13.445, |
|
"eval_steps_per_second": 3.361, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 400, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.034054314983424e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|