{
  "best_metric": 0.28383296728134155,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.8592910848549946,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004296455424274973,
      "grad_norm": 0.3637087345123291,
      "learning_rate": 1e-05,
      "loss": 0.8103,
      "step": 1
    },
    {
      "epoch": 0.004296455424274973,
      "eval_loss": 2.395321846008301,
      "eval_runtime": 27.149,
      "eval_samples_per_second": 14.439,
      "eval_steps_per_second": 3.61,
      "step": 1
    },
    {
      "epoch": 0.008592910848549946,
      "grad_norm": 0.5150983333587646,
      "learning_rate": 2e-05,
      "loss": 1.1294,
      "step": 2
    },
    {
      "epoch": 0.01288936627282492,
      "grad_norm": 0.6066375970840454,
      "learning_rate": 3e-05,
      "loss": 1.2703,
      "step": 3
    },
    {
      "epoch": 0.017185821697099892,
      "grad_norm": 0.5858229994773865,
      "learning_rate": 4e-05,
      "loss": 1.3463,
      "step": 4
    },
    {
      "epoch": 0.021482277121374866,
      "grad_norm": 0.6776552200317383,
      "learning_rate": 5e-05,
      "loss": 1.2584,
      "step": 5
    },
    {
      "epoch": 0.02577873254564984,
      "grad_norm": 0.606339693069458,
      "learning_rate": 6e-05,
      "loss": 1.2734,
      "step": 6
    },
    {
      "epoch": 0.03007518796992481,
      "grad_norm": 0.6469846963882446,
      "learning_rate": 7e-05,
      "loss": 1.1792,
      "step": 7
    },
    {
      "epoch": 0.034371643394199784,
      "grad_norm": 0.5836963057518005,
      "learning_rate": 8e-05,
      "loss": 1.1451,
      "step": 8
    },
    {
      "epoch": 0.03866809881847476,
      "grad_norm": 0.5917362570762634,
      "learning_rate": 9e-05,
      "loss": 0.9683,
      "step": 9
    },
    {
      "epoch": 0.04296455424274973,
      "grad_norm": 0.8102052211761475,
      "learning_rate": 0.0001,
      "loss": 0.8943,
      "step": 10
    },
    {
      "epoch": 0.047261009667024706,
      "grad_norm": 0.6655777096748352,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.739,
      "step": 11
    },
    {
      "epoch": 0.05155746509129968,
      "grad_norm": 0.7479202747344971,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.7188,
      "step": 12
    },
    {
      "epoch": 0.055853920515574654,
      "grad_norm": 0.7106651663780212,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.6258,
      "step": 13
    },
    {
      "epoch": 0.06015037593984962,
      "grad_norm": 0.5525503754615784,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.6035,
      "step": 14
    },
    {
      "epoch": 0.0644468313641246,
      "grad_norm": 0.45747628808021545,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.5847,
      "step": 15
    },
    {
      "epoch": 0.06874328678839957,
      "grad_norm": 0.40945225954055786,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.4871,
      "step": 16
    },
    {
      "epoch": 0.07303974221267455,
      "grad_norm": 0.5227960348129272,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.4554,
      "step": 17
    },
    {
      "epoch": 0.07733619763694952,
      "grad_norm": 0.3709550201892853,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.4575,
      "step": 18
    },
    {
      "epoch": 0.08163265306122448,
      "grad_norm": 0.35335588455200195,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.4892,
      "step": 19
    },
    {
      "epoch": 0.08592910848549946,
      "grad_norm": 0.41486284136772156,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.5214,
      "step": 20
    },
    {
      "epoch": 0.09022556390977443,
      "grad_norm": 0.4549630582332611,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.6713,
      "step": 21
    },
    {
      "epoch": 0.09452201933404941,
      "grad_norm": 0.4528137445449829,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.6117,
      "step": 22
    },
    {
      "epoch": 0.09881847475832438,
      "grad_norm": 0.47058239579200745,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.5362,
      "step": 23
    },
    {
      "epoch": 0.10311493018259936,
      "grad_norm": 0.5150260329246521,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.6023,
      "step": 24
    },
    {
      "epoch": 0.10741138560687433,
      "grad_norm": 0.4763900339603424,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.6096,
      "step": 25
    },
    {
      "epoch": 0.11170784103114931,
      "grad_norm": 0.4491954445838928,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.5383,
      "step": 26
    },
    {
      "epoch": 0.11600429645542427,
      "grad_norm": 0.37725311517715454,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.5781,
      "step": 27
    },
    {
      "epoch": 0.12030075187969924,
      "grad_norm": 0.48423704504966736,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.5574,
      "step": 28
    },
    {
      "epoch": 0.12459720730397422,
      "grad_norm": 0.4108438193798065,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.4861,
      "step": 29
    },
    {
      "epoch": 0.1288936627282492,
      "grad_norm": 0.40415096282958984,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.5138,
      "step": 30
    },
    {
      "epoch": 0.13319011815252416,
      "grad_norm": 0.4338022470474243,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.5121,
      "step": 31
    },
    {
      "epoch": 0.13748657357679914,
      "grad_norm": 0.639772355556488,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.5894,
      "step": 32
    },
    {
      "epoch": 0.14178302900107412,
      "grad_norm": 0.4205399453639984,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.4535,
      "step": 33
    },
    {
      "epoch": 0.1460794844253491,
      "grad_norm": 0.48501643538475037,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.4877,
      "step": 34
    },
    {
      "epoch": 0.15037593984962405,
      "grad_norm": 0.5719529390335083,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.5284,
      "step": 35
    },
    {
      "epoch": 0.15467239527389903,
      "grad_norm": 0.45450690388679504,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.4434,
      "step": 36
    },
    {
      "epoch": 0.158968850698174,
      "grad_norm": 0.5546385645866394,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.5373,
      "step": 37
    },
    {
      "epoch": 0.16326530612244897,
      "grad_norm": 0.5521821975708008,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.5153,
      "step": 38
    },
    {
      "epoch": 0.16756176154672395,
      "grad_norm": 0.4645980894565582,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.4344,
      "step": 39
    },
    {
      "epoch": 0.17185821697099893,
      "grad_norm": 0.5077691078186035,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.4193,
      "step": 40
    },
    {
      "epoch": 0.1761546723952739,
      "grad_norm": 0.6285257339477539,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.4512,
      "step": 41
    },
    {
      "epoch": 0.18045112781954886,
      "grad_norm": 0.7595347166061401,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.5183,
      "step": 42
    },
    {
      "epoch": 0.18474758324382384,
      "grad_norm": 0.6894388794898987,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.4657,
      "step": 43
    },
    {
      "epoch": 0.18904403866809882,
      "grad_norm": 0.6750317811965942,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.4279,
      "step": 44
    },
    {
      "epoch": 0.1933404940923738,
      "grad_norm": 0.9030443429946899,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.6653,
      "step": 45
    },
    {
      "epoch": 0.19763694951664876,
      "grad_norm": 0.7086827158927917,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.5327,
      "step": 46
    },
    {
      "epoch": 0.20193340494092374,
      "grad_norm": 0.887897789478302,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.5247,
      "step": 47
    },
    {
      "epoch": 0.20622986036519872,
      "grad_norm": 0.8612096309661865,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.5503,
      "step": 48
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.9472252130508423,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.6008,
      "step": 49
    },
    {
      "epoch": 0.21482277121374865,
      "grad_norm": 2.0724754333496094,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.4043,
      "step": 50
    },
    {
      "epoch": 0.21482277121374865,
      "eval_loss": 0.4881606698036194,
      "eval_runtime": 27.6633,
      "eval_samples_per_second": 14.17,
      "eval_steps_per_second": 3.543,
      "step": 50
    },
    {
      "epoch": 0.21911922663802363,
      "grad_norm": 0.44782543182373047,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.3396,
      "step": 51
    },
    {
      "epoch": 0.22341568206229862,
      "grad_norm": 0.5945596098899841,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.4482,
      "step": 52
    },
    {
      "epoch": 0.22771213748657357,
      "grad_norm": 0.5052401423454285,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.4347,
      "step": 53
    },
    {
      "epoch": 0.23200859291084855,
      "grad_norm": 0.4160156548023224,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.4182,
      "step": 54
    },
    {
      "epoch": 0.23630504833512353,
      "grad_norm": 0.3482251763343811,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.4322,
      "step": 55
    },
    {
      "epoch": 0.24060150375939848,
      "grad_norm": 0.3227390944957733,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.4172,
      "step": 56
    },
    {
      "epoch": 0.24489795918367346,
      "grad_norm": 0.29572582244873047,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.3785,
      "step": 57
    },
    {
      "epoch": 0.24919441460794844,
      "grad_norm": 0.35989752411842346,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.3799,
      "step": 58
    },
    {
      "epoch": 0.2534908700322234,
      "grad_norm": 0.385657399892807,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.3866,
      "step": 59
    },
    {
      "epoch": 0.2577873254564984,
      "grad_norm": 0.3310921788215637,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.3486,
      "step": 60
    },
    {
      "epoch": 0.2620837808807734,
      "grad_norm": 0.3422428071498871,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.3028,
      "step": 61
    },
    {
      "epoch": 0.2663802363050483,
      "grad_norm": 0.31822556257247925,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.297,
      "step": 62
    },
    {
      "epoch": 0.2706766917293233,
      "grad_norm": 0.3344513177871704,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.3756,
      "step": 63
    },
    {
      "epoch": 0.2749731471535983,
      "grad_norm": 0.2931362986564636,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.4118,
      "step": 64
    },
    {
      "epoch": 0.27926960257787325,
      "grad_norm": 0.3394784927368164,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.3515,
      "step": 65
    },
    {
      "epoch": 0.28356605800214824,
      "grad_norm": 0.317038357257843,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.3846,
      "step": 66
    },
    {
      "epoch": 0.2878625134264232,
      "grad_norm": 0.27550095319747925,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.3494,
      "step": 67
    },
    {
      "epoch": 0.2921589688506982,
      "grad_norm": 0.3647957146167755,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.3669,
      "step": 68
    },
    {
      "epoch": 0.2964554242749731,
      "grad_norm": 0.34974852204322815,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.3403,
      "step": 69
    },
    {
      "epoch": 0.3007518796992481,
      "grad_norm": 0.35612866282463074,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.3475,
      "step": 70
    },
    {
      "epoch": 0.3050483351235231,
      "grad_norm": 0.342788428068161,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.3453,
      "step": 71
    },
    {
      "epoch": 0.30934479054779807,
      "grad_norm": 0.3459738492965698,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.3246,
      "step": 72
    },
    {
      "epoch": 0.31364124597207305,
      "grad_norm": 0.4553300440311432,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.4952,
      "step": 73
    },
    {
      "epoch": 0.317937701396348,
      "grad_norm": 0.37579211592674255,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.3655,
      "step": 74
    },
    {
      "epoch": 0.322234156820623,
      "grad_norm": 0.42491230368614197,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.3746,
      "step": 75
    },
    {
      "epoch": 0.32653061224489793,
      "grad_norm": 0.4613445997238159,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.483,
      "step": 76
    },
    {
      "epoch": 0.3308270676691729,
      "grad_norm": 0.43322092294692993,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.3813,
      "step": 77
    },
    {
      "epoch": 0.3351235230934479,
      "grad_norm": 0.43508535623550415,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.4426,
      "step": 78
    },
    {
      "epoch": 0.3394199785177229,
      "grad_norm": 0.3977321982383728,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.3795,
      "step": 79
    },
    {
      "epoch": 0.34371643394199786,
      "grad_norm": 0.36455708742141724,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.3186,
      "step": 80
    },
    {
      "epoch": 0.34801288936627284,
      "grad_norm": 0.3600999116897583,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.3009,
      "step": 81
    },
    {
      "epoch": 0.3523093447905478,
      "grad_norm": 0.3982136845588684,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.3315,
      "step": 82
    },
    {
      "epoch": 0.35660580021482274,
      "grad_norm": 0.3989538550376892,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.3751,
      "step": 83
    },
    {
      "epoch": 0.3609022556390977,
      "grad_norm": 0.5305337905883789,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.4166,
      "step": 84
    },
    {
      "epoch": 0.3651987110633727,
      "grad_norm": 0.5150339007377625,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.3933,
      "step": 85
    },
    {
      "epoch": 0.3694951664876477,
      "grad_norm": 0.6410700678825378,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.5038,
      "step": 86
    },
    {
      "epoch": 0.37379162191192267,
      "grad_norm": 0.45256391167640686,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.3523,
      "step": 87
    },
    {
      "epoch": 0.37808807733619765,
      "grad_norm": 0.5341379046440125,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.3802,
      "step": 88
    },
    {
      "epoch": 0.38238453276047263,
      "grad_norm": 0.48598381876945496,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.4376,
      "step": 89
    },
    {
      "epoch": 0.3866809881847476,
      "grad_norm": 0.7272056341171265,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.4245,
      "step": 90
    },
    {
      "epoch": 0.39097744360902253,
      "grad_norm": 0.5760740041732788,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.3184,
      "step": 91
    },
    {
      "epoch": 0.3952738990332975,
      "grad_norm": 0.7006295323371887,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.4,
      "step": 92
    },
    {
      "epoch": 0.3995703544575725,
      "grad_norm": 0.635381281375885,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.4197,
      "step": 93
    },
    {
      "epoch": 0.4038668098818475,
      "grad_norm": 0.5333322882652283,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.3237,
      "step": 94
    },
    {
      "epoch": 0.40816326530612246,
      "grad_norm": 0.6799944043159485,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.352,
      "step": 95
    },
    {
      "epoch": 0.41245972073039744,
      "grad_norm": 0.6566717028617859,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.3699,
      "step": 96
    },
    {
      "epoch": 0.4167561761546724,
      "grad_norm": 0.7269343733787537,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.3615,
      "step": 97
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.982455313205719,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.5638,
      "step": 98
    },
    {
      "epoch": 0.4253490870032223,
      "grad_norm": 1.0673147439956665,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.3045,
      "step": 99
    },
    {
      "epoch": 0.4296455424274973,
      "grad_norm": 0.8240067362785339,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.2006,
      "step": 100
    },
    {
      "epoch": 0.4296455424274973,
      "eval_loss": 0.36948394775390625,
      "eval_runtime": 27.625,
      "eval_samples_per_second": 14.19,
      "eval_steps_per_second": 3.548,
      "step": 100
    },
    {
      "epoch": 0.4339419978517723,
      "grad_norm": 0.3018046021461487,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.2772,
      "step": 101
    },
    {
      "epoch": 0.43823845327604727,
      "grad_norm": 0.4544106721878052,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.3834,
      "step": 102
    },
    {
      "epoch": 0.44253490870032225,
      "grad_norm": 0.42616453766822815,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.4236,
      "step": 103
    },
    {
      "epoch": 0.44683136412459723,
      "grad_norm": 0.39616507291793823,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.3726,
      "step": 104
    },
    {
      "epoch": 0.45112781954887216,
      "grad_norm": 0.3530295789241791,
      "learning_rate": 5e-05,
      "loss": 0.3426,
      "step": 105
    },
    {
      "epoch": 0.45542427497314714,
      "grad_norm": 0.3598898947238922,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.3308,
      "step": 106
    },
    {
      "epoch": 0.4597207303974221,
      "grad_norm": 0.40101805329322815,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.3732,
      "step": 107
    },
    {
      "epoch": 0.4640171858216971,
      "grad_norm": 0.3908984065055847,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.397,
      "step": 108
    },
    {
      "epoch": 0.4683136412459721,
      "grad_norm": 0.3244476020336151,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.3096,
      "step": 109
    },
    {
      "epoch": 0.47261009667024706,
      "grad_norm": 0.3139345645904541,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.3013,
      "step": 110
    },
    {
      "epoch": 0.47690655209452204,
      "grad_norm": 0.330341637134552,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.2965,
      "step": 111
    },
    {
      "epoch": 0.48120300751879697,
      "grad_norm": 0.32211899757385254,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.2924,
      "step": 112
    },
    {
      "epoch": 0.48549946294307195,
      "grad_norm": 0.34253570437431335,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.2737,
      "step": 113
    },
    {
      "epoch": 0.4897959183673469,
      "grad_norm": 0.3796078562736511,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.3203,
      "step": 114
    },
    {
      "epoch": 0.4940923737916219,
      "grad_norm": 0.2806931436061859,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.2558,
      "step": 115
    },
    {
      "epoch": 0.4983888292158969,
      "grad_norm": 0.29928621649742126,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.2506,
      "step": 116
    },
    {
      "epoch": 0.5026852846401718,
      "grad_norm": 0.32902368903160095,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.2792,
      "step": 117
    },
    {
      "epoch": 0.5069817400644469,
      "grad_norm": 0.35551485419273376,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.2569,
      "step": 118
    },
    {
      "epoch": 0.5112781954887218,
      "grad_norm": 0.34765690565109253,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.2555,
      "step": 119
    },
    {
      "epoch": 0.5155746509129968,
      "grad_norm": 0.4137231707572937,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.3171,
      "step": 120
    },
    {
      "epoch": 0.5198711063372717,
      "grad_norm": 0.4357423186302185,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.3468,
      "step": 121
    },
    {
      "epoch": 0.5241675617615468,
      "grad_norm": 0.40948164463043213,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.2999,
      "step": 122
    },
    {
      "epoch": 0.5284640171858217,
      "grad_norm": 0.5223953723907471,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.3805,
      "step": 123
    },
    {
      "epoch": 0.5327604726100966,
      "grad_norm": 0.5923753976821899,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.4177,
      "step": 124
    },
    {
      "epoch": 0.5370569280343717,
      "grad_norm": 0.5544208884239197,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.3538,
      "step": 125
    },
    {
      "epoch": 0.5413533834586466,
      "grad_norm": 0.482532799243927,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.3155,
      "step": 126
    },
    {
      "epoch": 0.5456498388829216,
      "grad_norm": 0.5058260560035706,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.2917,
      "step": 127
    },
    {
      "epoch": 0.5499462943071965,
      "grad_norm": 0.4469728469848633,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.3291,
      "step": 128
    },
    {
      "epoch": 0.5542427497314716,
      "grad_norm": 0.4818762242794037,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.322,
      "step": 129
    },
    {
      "epoch": 0.5585392051557465,
      "grad_norm": 0.4870593249797821,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.3583,
      "step": 130
    },
    {
      "epoch": 0.5628356605800214,
      "grad_norm": 0.5139895677566528,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.3634,
      "step": 131
    },
    {
      "epoch": 0.5671321160042965,
      "grad_norm": 0.5461882948875427,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.3086,
      "step": 132
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.5530821084976196,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.3809,
      "step": 133
    },
    {
      "epoch": 0.5757250268528464,
      "grad_norm": 0.5471766591072083,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.3511,
      "step": 134
    },
    {
      "epoch": 0.5800214822771214,
      "grad_norm": 1.1091238260269165,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.3018,
      "step": 135
    },
    {
      "epoch": 0.5843179377013964,
      "grad_norm": 0.6000388264656067,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.4016,
      "step": 136
    },
    {
      "epoch": 0.5886143931256713,
      "grad_norm": 0.6467744708061218,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.3761,
      "step": 137
    },
    {
      "epoch": 0.5929108485499462,
      "grad_norm": 0.5213028192520142,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.3199,
      "step": 138
    },
    {
      "epoch": 0.5972073039742213,
      "grad_norm": 0.5980802774429321,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.2639,
      "step": 139
    },
    {
      "epoch": 0.6015037593984962,
      "grad_norm": 0.5194615721702576,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.3044,
      "step": 140
    },
    {
      "epoch": 0.6058002148227712,
      "grad_norm": 0.6527299880981445,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.3249,
      "step": 141
    },
    {
      "epoch": 0.6100966702470462,
      "grad_norm": 0.5338525772094727,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.2942,
      "step": 142
    },
    {
      "epoch": 0.6143931256713212,
      "grad_norm": 0.7685671448707581,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.4126,
      "step": 143
    },
    {
      "epoch": 0.6186895810955961,
      "grad_norm": 0.7643443942070007,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.3202,
      "step": 144
    },
    {
      "epoch": 0.6229860365198711,
      "grad_norm": 0.6941141486167908,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.3295,
      "step": 145
    },
    {
      "epoch": 0.6272824919441461,
      "grad_norm": 0.9832645058631897,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.383,
      "step": 146
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.9147282242774963,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.3818,
      "step": 147
    },
    {
      "epoch": 0.635875402792696,
      "grad_norm": 0.786456823348999,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.2811,
      "step": 148
    },
    {
      "epoch": 0.640171858216971,
      "grad_norm": 0.8777568936347961,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.3394,
      "step": 149
    },
    {
      "epoch": 0.644468313641246,
      "grad_norm": 1.1083598136901855,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.2503,
      "step": 150
    },
    {
      "epoch": 0.644468313641246,
      "eval_loss": 0.29443585872650146,
      "eval_runtime": 27.5946,
      "eval_samples_per_second": 14.206,
      "eval_steps_per_second": 3.551,
      "step": 150
    },
    {
      "epoch": 0.6487647690655209,
      "grad_norm": 0.22200804948806763,
      "learning_rate": 1.553232954407171e-05,
      "loss": 0.1898,
      "step": 151
    },
    {
      "epoch": 0.6530612244897959,
      "grad_norm": 0.3020473122596741,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.2586,
      "step": 152
    },
    {
      "epoch": 0.6573576799140709,
      "grad_norm": 0.30981576442718506,
      "learning_rate": 1.435357758543015e-05,
      "loss": 0.2909,
      "step": 153
    },
    {
      "epoch": 0.6616541353383458,
      "grad_norm": 0.315384179353714,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 0.2658,
      "step": 154
    },
    {
      "epoch": 0.6659505907626209,
      "grad_norm": 0.33048534393310547,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 0.2657,
      "step": 155
    },
    {
      "epoch": 0.6702470461868958,
      "grad_norm": 0.32182732224464417,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 0.2593,
      "step": 156
    },
    {
      "epoch": 0.6745435016111708,
      "grad_norm": 0.3142335116863251,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.248,
      "step": 157
    },
    {
      "epoch": 0.6788399570354458,
      "grad_norm": 0.3597651422023773,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.2663,
      "step": 158
    },
    {
      "epoch": 0.6831364124597207,
      "grad_norm": 0.3391364812850952,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.3082,
      "step": 159
    },
    {
      "epoch": 0.6874328678839957,
      "grad_norm": 0.37021851539611816,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.3347,
      "step": 160
    },
    {
      "epoch": 0.6917293233082706,
      "grad_norm": 0.3648050129413605,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.25,
      "step": 161
    },
    {
      "epoch": 0.6960257787325457,
      "grad_norm": 0.38995301723480225,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.3182,
      "step": 162
    },
    {
      "epoch": 0.7003222341568206,
      "grad_norm": 0.3318188488483429,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.2502,
      "step": 163
    },
    {
      "epoch": 0.7046186895810956,
      "grad_norm": 0.4105289578437805,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.2879,
      "step": 164
    },
    {
      "epoch": 0.7089151450053706,
      "grad_norm": 0.41487324237823486,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.2896,
      "step": 165
    },
    {
      "epoch": 0.7132116004296455,
      "grad_norm": 0.4566102623939514,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.305,
      "step": 166
    },
    {
      "epoch": 0.7175080558539205,
      "grad_norm": 0.41210034489631653,
      "learning_rate": 7.260364370723044e-06,
      "loss": 0.2921,
      "step": 167
    },
    {
      "epoch": 0.7218045112781954,
      "grad_norm": 0.3816104531288147,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.2386,
      "step": 168
    },
    {
      "epoch": 0.7261009667024705,
      "grad_norm": 0.3759080171585083,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.2741,
      "step": 169
    },
    {
      "epoch": 0.7303974221267454,
      "grad_norm": 0.4245615303516388,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.3129,
      "step": 170
    },
    {
      "epoch": 0.7346938775510204,
      "grad_norm": 0.3779868185520172,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.2264,
      "step": 171
    },
    {
      "epoch": 0.7389903329752954,
      "grad_norm": 0.4320870339870453,
      "learning_rate": 5.263533508961827e-06,
      "loss": 0.2906,
      "step": 172
    },
    {
      "epoch": 0.7432867883995704,
      "grad_norm": 0.44205906987190247,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.2615,
      "step": 173
    },
    {
      "epoch": 0.7475832438238453,
      "grad_norm": 0.42238447070121765,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.2557,
      "step": 174
    },
    {
      "epoch": 0.7518796992481203,
      "grad_norm": 0.4558790326118469,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.2831,
      "step": 175
    },
    {
      "epoch": 0.7561761546723953,
      "grad_norm": 0.46227309107780457,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.2811,
      "step": 176
    },
    {
      "epoch": 0.7604726100966702,
      "grad_norm": 0.44037771224975586,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.2352,
      "step": 177
    },
    {
      "epoch": 0.7647690655209453,
      "grad_norm": 0.5309369564056396,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.2967,
      "step": 178
    },
    {
      "epoch": 0.7690655209452202,
      "grad_norm": 0.4705249071121216,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.2921,
      "step": 179
    },
    {
      "epoch": 0.7733619763694952,
      "grad_norm": 0.5692561268806458,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.295,
      "step": 180
    },
    {
      "epoch": 0.7776584317937701,
      "grad_norm": 0.4589831531047821,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.2454,
      "step": 181
    },
    {
      "epoch": 0.7819548872180451,
      "grad_norm": 0.5403589010238647,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 0.3231,
      "step": 182
    },
    {
      "epoch": 0.7862513426423201,
      "grad_norm": 0.5366402268409729,
      "learning_rate": 1.962316193157593e-06,
      "loss": 0.259,
      "step": 183
    },
    {
      "epoch": 0.790547798066595,
      "grad_norm": 0.6617404818534851,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 0.3531,
      "step": 184
    },
    {
      "epoch": 0.7948442534908701,
      "grad_norm": 0.6814762949943542,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.3581,
      "step": 185
    },
    {
      "epoch": 0.799140708915145,
      "grad_norm": 0.608530580997467,
      "learning_rate": 1.333670137599713e-06,
      "loss": 0.2712,
      "step": 186
    },
    {
      "epoch": 0.80343716433942,
      "grad_norm": 0.5977616906166077,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 0.2808,
      "step": 187
    },
    {
      "epoch": 0.807733619763695,
      "grad_norm": 0.5270123481750488,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.2795,
      "step": 188
    },
    {
      "epoch": 0.8120300751879699,
      "grad_norm": 0.4959504306316376,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.2393,
      "step": 189
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 0.6505959630012512,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.3502,
      "step": 190
    },
    {
      "epoch": 0.8206229860365198,
      "grad_norm": 0.6694881319999695,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.3087,
      "step": 191
    },
    {
      "epoch": 0.8249194414607949,
      "grad_norm": 0.535914421081543,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.2231,
      "step": 192
    },
    {
      "epoch": 0.8292158968850698,
      "grad_norm": 0.6710368394851685,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.2621,
      "step": 193
    },
    {
      "epoch": 0.8335123523093448,
      "grad_norm": 0.7072320580482483,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.281,
      "step": 194
    },
    {
      "epoch": 0.8378088077336198,
      "grad_norm": 0.7714508175849915,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.2793,
      "step": 195
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.6197285056114197,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.2679,
      "step": 196
    },
    {
      "epoch": 0.8464017185821697,
      "grad_norm": 0.7446801662445068,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.3773,
      "step": 197
    },
    {
      "epoch": 0.8506981740064447,
      "grad_norm": 0.7205499410629272,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.316,
      "step": 198
    },
    {
      "epoch": 0.8549946294307197,
      "grad_norm": 0.8960843086242676,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.3137,
      "step": 199
    },
    {
      "epoch": 0.8592910848549946,
      "grad_norm": 0.6217831373214722,
      "learning_rate": 0.0,
      "loss": 0.1739,
      "step": 200
    },
    {
      "epoch": 0.8592910848549946,
      "eval_loss": 0.28383296728134155,
      "eval_runtime": 27.6227,
      "eval_samples_per_second": 14.191,
      "eval_steps_per_second": 3.548,
      "step": 200
    }
  ],
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.661403865382912e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |