|
[
  {"loss": 13.2232, "grad_norm": 6.157718181610107, "learning_rate": 3.824091778202677e-06, "epoch": 0.03824091778202677, "step": 20},
  {"loss": 13.2113, "grad_norm": 6.144223213195801, "learning_rate": 7.648183556405354e-06, "epoch": 0.07648183556405354, "step": 40},
  {"loss": 13.1625, "grad_norm": 6.032691955566406, "learning_rate": 1.147227533460803e-05, "epoch": 0.1147227533460803, "step": 60},
  {"loss": 13.1174, "grad_norm": 5.916826248168945, "learning_rate": 1.529636711281071e-05, "epoch": 0.15296367112810708, "step": 80},
  {"loss": 13.0512, "grad_norm": 5.7198004722595215, "learning_rate": 1.9120458891013384e-05, "epoch": 0.19120458891013384, "step": 100},
  {"loss": 12.9931, "grad_norm": 5.554529666900635, "learning_rate": 2.294455066921606e-05, "epoch": 0.2294455066921606, "step": 120},
  {"loss": 12.9042, "grad_norm": 5.364482879638672, "learning_rate": 2.6768642447418742e-05, "epoch": 0.2676864244741874, "step": 140},
  {"loss": 12.8488, "grad_norm": 5.091818809509277, "learning_rate": 3.059273422562142e-05, "epoch": 0.30592734225621415, "step": 160},
  {"loss": 12.7715, "grad_norm": 5.035643577575684, "learning_rate": 3.441682600382409e-05, "epoch": 0.3441682600382409, "step": 180},
  {"loss": 12.6747, "grad_norm": 4.819056987762451, "learning_rate": 3.824091778202677e-05, "epoch": 0.3824091778202677, "step": 200},
  {"loss": 12.6366, "grad_norm": 4.597919464111328, "learning_rate": 4.2065009560229444e-05, "epoch": 0.42065009560229444, "step": 220},
  {"loss": 12.5388, "grad_norm": 4.551054954528809, "learning_rate": 4.588910133843212e-05, "epoch": 0.4588910133843212, "step": 240},
  {"loss": 12.4527, "grad_norm": 4.289029598236084, "learning_rate": 4.97131931166348e-05, "epoch": 0.497131931166348, "step": 260},
  {"loss": 12.3809, "grad_norm": 4.291126728057861, "learning_rate": 5.3537284894837484e-05, "epoch": 0.5353728489483748, "step": 280},
  {"loss": 12.3185, "grad_norm": 4.090356826782227, "learning_rate": 5.736137667304016e-05, "epoch": 0.5736137667304015, "step": 300},
  {"loss": 12.2101, "grad_norm": 3.9066805839538574, "learning_rate": 6.118546845124283e-05, "epoch": 0.6118546845124283, "step": 320},
  {"loss": 12.1255, "grad_norm": 3.937908887863159, "learning_rate": 6.50095602294455e-05, "epoch": 0.6500956022944551, "step": 340},
  {"loss": 12.0543, "grad_norm": 3.919820547103882, "learning_rate": 6.883365200764819e-05, "epoch": 0.6883365200764818, "step": 360},
  {"loss": 11.9417, "grad_norm": 3.8298187255859375, "learning_rate": 7.265774378585087e-05, "epoch": 0.7265774378585086, "step": 380},
  {"loss": 11.8644, "grad_norm": 3.7290520668029785, "learning_rate": 7.648183556405354e-05, "epoch": 0.7648183556405354, "step": 400},
  {"loss": 11.8122, "grad_norm": 3.76938533782959, "learning_rate": 8.030592734225622e-05, "epoch": 0.8030592734225621, "step": 420},
  {"loss": 11.7117, "grad_norm": 3.8729827404022217, "learning_rate": 8.413001912045889e-05, "epoch": 0.8413001912045889, "step": 440},
  {"loss": 11.6245, "grad_norm": 3.7178924083709717, "learning_rate": 8.795411089866157e-05, "epoch": 0.8795411089866156, "step": 460},
  {"loss": 11.547, "grad_norm": 3.7744827270507812, "learning_rate": 9.177820267686424e-05, "epoch": 0.9177820267686424, "step": 480},
  {"loss": 11.4699, "grad_norm": 3.6705052852630615, "learning_rate": 9.560229445506692e-05, "epoch": 0.9560229445506692, "step": 500},
  {"loss": 11.3851, "grad_norm": 3.6992719173431396, "learning_rate": 9.94263862332696e-05, "epoch": 0.994263862332696, "step": 520},
  {"eval_loss": 11.029301643371582, "eval_accuracy": 0.18062563067608475, "eval_runtime": 592.6353, "eval_samples_per_second": 25.083, "eval_steps_per_second": 25.083, "epoch": 1.0, "step": 523},
  {"loss": 11.2589, "grad_norm": 3.6838159561157227, "learning_rate": 9.963883577650308e-05, "epoch": 1.0325047801147227, "step": 540},
  {"loss": 11.1668, "grad_norm": 3.7846293449401855, "learning_rate": 9.921393669003612e-05, "epoch": 1.0707456978967496, "step": 560},
  {"loss": 11.1053, "grad_norm": 3.688416004180908, "learning_rate": 9.878903760356916e-05, "epoch": 1.1089866156787762, "step": 580},
  {"loss": 11.019, "grad_norm": 3.724273204803467, "learning_rate": 9.836413851710219e-05, "epoch": 1.147227533460803, "step": 600},
  {"loss": 10.9731, "grad_norm": 3.840388536453247, "learning_rate": 9.793923943063523e-05, "epoch": 1.1854684512428297, "step": 620},
  {"loss": 10.875, "grad_norm": 3.828228235244751, "learning_rate": 9.751434034416827e-05, "epoch": 1.2237093690248566, "step": 640},
  {"loss": 10.8111, "grad_norm": 3.891911745071411, "learning_rate": 9.70894412577013e-05, "epoch": 1.2619502868068833, "step": 660},
  {"loss": 10.7717, "grad_norm": 3.8076562881469727, "learning_rate": 9.666454217123433e-05, "epoch": 1.3001912045889101, "step": 680},
  {"loss": 10.6723, "grad_norm": 3.8521881103515625, "learning_rate": 9.623964308476737e-05, "epoch": 1.338432122370937, "step": 700},
  {"loss": 10.5961, "grad_norm": 3.8576488494873047, "learning_rate": 9.58147439983004e-05, "epoch": 1.3766730401529637, "step": 720},
  {"loss": 10.5392, "grad_norm": 4.002715587615967, "learning_rate": 9.538984491183345e-05, "epoch": 1.4149139579349903, "step": 740},
  {"loss": 10.5018, "grad_norm": 3.8657026290893555, "learning_rate": 9.496494582536648e-05, "epoch": 1.4531548757170172, "step": 760},
  {"loss": 10.4325, "grad_norm": 3.9424169063568115, "learning_rate": 9.454004673889951e-05, "epoch": 1.491395793499044, "step": 780},
  {"loss": 10.3722, "grad_norm": 3.9783968925476074, "learning_rate": 9.411514765243256e-05, "epoch": 1.5296367112810707, "step": 800},
  {"loss": 10.3069, "grad_norm": 4.081951141357422, "learning_rate": 9.369024856596559e-05, "epoch": 1.5678776290630974, "step": 820},
  {"loss": 10.2527, "grad_norm": 4.141290187835693, "learning_rate": 9.326534947949863e-05, "epoch": 1.6061185468451242, "step": 840},
  {"loss": 10.2271, "grad_norm": 4.294083595275879, "learning_rate": 9.284045039303167e-05, "epoch": 1.644359464627151, "step": 860},
  {"loss": 10.1756, "grad_norm": 4.727543354034424, "learning_rate": 9.241555130656469e-05, "epoch": 1.682600382409178, "step": 880},
  {"loss": 10.0936, "grad_norm": 4.068965911865234, "learning_rate": 9.199065222009773e-05, "epoch": 1.7208413001912046, "step": 900},
  {"loss": 10.0937, "grad_norm": 4.025643825531006, "learning_rate": 9.156575313363077e-05, "epoch": 1.7590822179732313, "step": 920},
  {"loss": 10.0217, "grad_norm": 4.317354679107666, "learning_rate": 9.11408540471638e-05, "epoch": 1.7973231357552581, "step": 940},
  {"loss": 9.9743, "grad_norm": 4.101060390472412, "learning_rate": 9.071595496069684e-05, "epoch": 1.835564053537285, "step": 960},
  {"loss": 9.9879, "grad_norm": 4.225609302520752, "learning_rate": 9.029105587422988e-05, "epoch": 1.8738049713193117, "step": 980},
  {"loss": 9.8273, "grad_norm": 4.3140668869018555, "learning_rate": 8.986615678776292e-05, "epoch": 1.9120458891013383, "step": 1000},
  {"loss": 9.8136, "grad_norm": 4.199500560760498, "learning_rate": 8.944125770129594e-05, "epoch": 1.9502868068833652, "step": 1020},
  {"loss": 9.7596, "grad_norm": 4.457912445068359, "learning_rate": 8.901635861482898e-05, "epoch": 1.988527724665392, "step": 1040},
  {"eval_loss": 9.140138626098633, "eval_accuracy": 0.3849983181971073, "eval_runtime": 461.2724, "eval_samples_per_second": 32.226, "eval_steps_per_second": 32.226, "epoch": 2.0, "step": 1046},
  {"loss": 9.714, "grad_norm": 4.428006172180176, "learning_rate": 8.859145952836202e-05, "epoch": 2.026768642447419, "step": 1060},
  {"loss": 9.5508, "grad_norm": 4.372852325439453, "learning_rate": 8.816656044189505e-05, "epoch": 2.0650095602294454, "step": 1080},
  {"loss": 9.6096, "grad_norm": 4.381687641143799, "learning_rate": 8.774166135542809e-05, "epoch": 2.1032504780114722, "step": 1100},
  {"loss": 9.5077, "grad_norm": 4.5865631103515625, "learning_rate": 8.731676226896113e-05, "epoch": 2.141491395793499, "step": 1120},
  {"loss": 9.5044, "grad_norm": 4.363910675048828, "learning_rate": 8.689186318249416e-05, "epoch": 2.179732313575526, "step": 1140},
  {"loss": 9.4205, "grad_norm": 4.577084541320801, "learning_rate": 8.646696409602721e-05, "epoch": 2.2179732313575524, "step": 1160},
  {"loss": 9.4317, "grad_norm": 4.576254367828369, "learning_rate": 8.604206500956024e-05, "epoch": 2.2562141491395793, "step": 1180},
  {"loss": 9.3607, "grad_norm": 4.4399847984313965, "learning_rate": 8.561716592309326e-05, "epoch": 2.294455066921606, "step": 1200},
  {"loss": 9.2533, "grad_norm": 4.595015525817871, "learning_rate": 8.51922668366263e-05, "epoch": 2.332695984703633, "step": 1220},
  {"loss": 9.3384, "grad_norm": 4.900874614715576, "learning_rate": 8.476736775015934e-05, "epoch": 2.3709369024856595, "step": 1240},
  {"loss": 9.293, "grad_norm": 4.594742774963379, "learning_rate": 8.434246866369238e-05, "epoch": 2.4091778202676863, "step": 1260},
  {"loss": 9.1986, "grad_norm": 4.587216377258301, "learning_rate": 8.391756957722541e-05, "epoch": 2.447418738049713, "step": 1280},
  {"loss": 9.1358, "grad_norm": 4.735275745391846, "learning_rate": 8.349267049075845e-05, "epoch": 2.48565965583174, "step": 1300},
  {"loss": 9.1284, "grad_norm": 4.627840995788574, "learning_rate": 8.306777140429149e-05, "epoch": 2.5239005736137665, "step": 1320},
  {"loss": 9.0949, "grad_norm": 4.658718585968018, "learning_rate": 8.264287231782451e-05, "epoch": 2.5621414913957934, "step": 1340},
  {"loss": 9.0312, "grad_norm": 4.875549793243408, "learning_rate": 8.221797323135755e-05, "epoch": 2.6003824091778203, "step": 1360},
  {"loss": 8.9949, "grad_norm": 4.683437347412109, "learning_rate": 8.179307414489059e-05, "epoch": 2.638623326959847, "step": 1380},
  {"loss": 8.9705, "grad_norm": 4.861114025115967, "learning_rate": 8.136817505842362e-05, "epoch": 2.676864244741874, "step": 1400},
  {"loss": 8.9483, "grad_norm": 4.727562427520752, "learning_rate": 8.094327597195667e-05, "epoch": 2.7151051625239004, "step": 1420},
  {"loss": 8.9254, "grad_norm": 4.8202948570251465, "learning_rate": 8.05183768854897e-05, "epoch": 2.7533460803059273, "step": 1440},
  {"loss": 8.8768, "grad_norm": 4.926464557647705, "learning_rate": 8.009347779902273e-05, "epoch": 2.791586998087954, "step": 1460},
  {"loss": 8.8044, "grad_norm": 4.7756028175354, "learning_rate": 7.966857871255578e-05, "epoch": 2.8298279158699806, "step": 1480},
  {"loss": 8.7788, "grad_norm": 4.888403415679932, "learning_rate": 7.92436796260888e-05, "epoch": 2.8680688336520075, "step": 1500},
  {"loss": 8.8032, "grad_norm": 4.943230152130127, "learning_rate": 7.881878053962184e-05, "epoch": 2.9063097514340344, "step": 1520},
  {"loss": 8.7507, "grad_norm": 5.011119842529297, "learning_rate": 7.839388145315488e-05, "epoch": 2.9445506692160612, "step": 1540},
  {"loss": 8.7136, "grad_norm": 5.068637847900391, "learning_rate": 7.796898236668791e-05, "epoch": 2.982791586998088, "step": 1560},
  {"eval_loss": 7.882061958312988, "eval_accuracy": 0.52418432559704, "eval_runtime": 418.5795, "eval_samples_per_second": 35.513, "eval_steps_per_second": 35.513, "epoch": 3.0, "step": 1569},
  {"loss": 8.6104, "grad_norm": 4.895749092102051, "learning_rate": 7.754408328022095e-05, "epoch": 3.0210325047801145, "step": 1580},
  {"loss": 8.6136, "grad_norm": 5.138400077819824, "learning_rate": 7.711918419375399e-05, "epoch": 3.0592734225621414, "step": 1600},
  {"loss": 8.5866, "grad_norm": 5.270049571990967, "learning_rate": 7.669428510728702e-05, "epoch": 3.0975143403441683, "step": 1620},
  {"loss": 8.492, "grad_norm": 5.178355693817139, "learning_rate": 7.626938602082006e-05, "epoch": 3.135755258126195, "step": 1640},
  {"loss": 8.4897, "grad_norm": 5.312692165374756, "learning_rate": 7.58444869343531e-05, "epoch": 3.173996175908222, "step": 1660},
  {"loss": 8.4441, "grad_norm": 5.227985382080078, "learning_rate": 7.541958784788614e-05, "epoch": 3.2122370936902485, "step": 1680},
  {"loss": 8.4722, "grad_norm": 5.042078495025635, "learning_rate": 7.499468876141916e-05, "epoch": 3.2504780114722753, "step": 1700},
  {"loss": 8.3105, "grad_norm": 5.250526428222656, "learning_rate": 7.45697896749522e-05, "epoch": 3.288718929254302, "step": 1720},
  {"loss": 8.3308, "grad_norm": 5.22187614440918, "learning_rate": 7.414489058848524e-05, "epoch": 3.3269598470363286, "step": 1740},
  {"loss": 8.2969, "grad_norm": 5.491254806518555, "learning_rate": 7.371999150201827e-05, "epoch": 3.3652007648183555, "step": 1760},
  {"loss": 8.2593, "grad_norm": 5.482990741729736, "learning_rate": 7.329509241555131e-05, "epoch": 3.4034416826003824, "step": 1780},
  {"loss": 8.3087, "grad_norm": 5.359766960144043, "learning_rate": 7.287019332908435e-05, "epoch": 3.4416826003824093, "step": 1800},
  {"loss": 8.2664, "grad_norm": 5.788363456726074, "learning_rate": 7.244529424261737e-05, "epoch": 3.479923518164436, "step": 1820},
  {"loss": 8.2543, "grad_norm": 5.335551738739014, "learning_rate": 7.202039515615043e-05, "epoch": 3.5181644359464626, "step": 1840},
  {"loss": 8.2604, "grad_norm": 5.465627193450928, "learning_rate": 7.159549606968345e-05, "epoch": 3.5564053537284894, "step": 1860},
  {"loss": 8.1616, "grad_norm": 5.594823837280273, "learning_rate": 7.117059698321648e-05, "epoch": 3.5946462715105163, "step": 1880},
  {"loss": 8.1582, "grad_norm": 5.58858060836792, "learning_rate": 7.074569789674953e-05, "epoch": 3.632887189292543, "step": 1900},
  {"loss": 8.1061, "grad_norm": 5.514508247375488, "learning_rate": 7.032079881028256e-05, "epoch": 3.67112810707457, "step": 1920},
  {"loss": 8.0912, "grad_norm": 5.644900321960449, "learning_rate": 6.98958997238156e-05, "epoch": 3.7093690248565965, "step": 1940},
  {"loss": 7.9596, "grad_norm": 5.701168060302734, "learning_rate": 6.947100063734864e-05, "epoch": 3.7476099426386233, "step": 1960},
  {"loss": 8.0403, "grad_norm": 5.880733013153076, "learning_rate": 6.904610155088167e-05, "epoch": 3.78585086042065, "step": 1980},
  {"loss": 7.9666, "grad_norm": 5.638689994812012, "learning_rate": 6.86212024644147e-05, "epoch": 3.8240917782026767, "step": 2000},
  {"loss": 7.9633, "grad_norm": 6.002101421356201, "learning_rate": 6.819630337794775e-05, "epoch": 3.8623326959847035, "step": 2020},
  {"loss": 7.8817, "grad_norm": 5.628067493438721, "learning_rate": 6.777140429148077e-05, "epoch": 3.9005736137667304, "step": 2040},
  {"loss": 7.9118, "grad_norm": 6.128510475158691, "learning_rate": 6.734650520501381e-05, "epoch": 3.9388145315487573, "step": 2060},
  {"loss": 7.848, "grad_norm": 5.620929718017578, "learning_rate": 6.692160611854685e-05, "epoch": 3.977055449330784, "step": 2080},
  {"eval_loss": 6.945113658905029, "eval_accuracy": 0.6143962327615203, "eval_runtime": 367.1966, "eval_samples_per_second": 40.482, "eval_steps_per_second": 40.482, "epoch": 4.0, "step": 2092},
  {"loss": 7.8607, "grad_norm": 5.820804595947266, "learning_rate": 6.649670703207989e-05, "epoch": 4.015296367112811, "step": 2100},
  {"loss": 7.7072, "grad_norm": 5.6448493003845215, "learning_rate": 6.607180794561292e-05, "epoch": 4.053537284894838, "step": 2120},
  {"loss": 7.772, "grad_norm": 6.283373832702637, "learning_rate": 6.564690885914596e-05, "epoch": 4.091778202676864, "step": 2140},
  {"loss": 7.7211, "grad_norm": 6.125846862792969, "learning_rate": 6.5222009772679e-05, "epoch": 4.130019120458891, "step": 2160},
  {"loss": 7.6563, "grad_norm": 5.701002597808838, "learning_rate": 6.479711068621202e-05, "epoch": 4.168260038240918, "step": 2180},
  {"loss": 7.711, "grad_norm": 5.910340785980225, "learning_rate": 6.437221159974506e-05, "epoch": 4.2065009560229445, "step": 2200},
  {"loss": 7.7582, "grad_norm": 5.8003082275390625, "learning_rate": 6.39473125132781e-05, "epoch": 4.244741873804971, "step": 2220},
  {"loss": 7.6215, "grad_norm": 5.95621395111084, "learning_rate": 6.352241342681113e-05, "epoch": 4.282982791586998, "step": 2240},
  {"loss": 7.5932, "grad_norm": 5.836912155151367, "learning_rate": 6.309751434034417e-05, "epoch": 4.321223709369025, "step": 2260},
  {"loss": 7.5122, "grad_norm": 6.156320095062256, "learning_rate": 6.267261525387721e-05, "epoch": 4.359464627151052, "step": 2280},
  {"loss": 7.5488, "grad_norm": 5.937085151672363, "learning_rate": 6.224771616741024e-05, "epoch": 4.397705544933078, "step": 2300},
  {"loss": 7.5972, "grad_norm": 5.949016571044922, "learning_rate": 6.182281708094328e-05, "epoch": 4.435946462715105, "step": 2320},
  {"loss": 7.4327, "grad_norm": 6.26347541809082, "learning_rate": 6.139791799447631e-05, "epoch": 4.474187380497132, "step": 2340},
  {"loss": 7.555, "grad_norm": 6.376476287841797, "learning_rate": 6.097301890800935e-05, "epoch": 4.512428298279159, "step": 2360},
  {"loss": 7.5463, "grad_norm": 6.2988200187683105, "learning_rate": 6.054811982154238e-05, "epoch": 4.550669216061186, "step": 2380},
  {"loss": 7.4637, "grad_norm": 5.916903972625732, "learning_rate": 6.012322073507543e-05, "epoch": 4.588910133843212, "step": 2400},
  {"loss": 7.3857, "grad_norm": 5.896063327789307, "learning_rate": 5.969832164860846e-05, "epoch": 4.627151051625239, "step": 2420},
  {"loss": 7.4363, "grad_norm": 6.14431619644165, "learning_rate": 5.927342256214149e-05, "epoch": 4.665391969407266, "step": 2440},
  {"loss": 7.406, "grad_norm": 6.2994256019592285, "learning_rate": 5.8848523475674533e-05, "epoch": 4.7036328871892925, "step": 2460},
  {"loss": 7.338, "grad_norm": 6.134793758392334, "learning_rate": 5.8423624389207567e-05, "epoch": 4.741873804971319, "step": 2480},
  {"loss": 7.3912, "grad_norm": 6.245213031768799, "learning_rate": 5.79987253027406e-05, "epoch": 4.780114722753346, "step": 2500},
  {"loss": 7.3548, "grad_norm": 6.118636131286621, "learning_rate": 5.757382621627364e-05, "epoch": 4.818355640535373, "step": 2520},
  {"loss": 7.3119, "grad_norm": 6.391002178192139, "learning_rate": 5.714892712980667e-05, "epoch": 4.8565965583174, "step": 2540},
  {"loss": 7.2119, "grad_norm": 6.539446830749512, "learning_rate": 5.6724028043339705e-05, "epoch": 4.894837476099426, "step": 2560},
  {"loss": 7.2505, "grad_norm": 6.162653923034668, "learning_rate": 5.6299128956872745e-05, "epoch": 4.933078393881453, "step": 2580},
  {"loss": 7.1912, "grad_norm": 6.580591678619385, "learning_rate": 5.587422987040578e-05, "epoch": 4.97131931166348, "step": 2600},
  {"eval_loss": 6.262951850891113, "eval_accuracy": 0.6821392532795156, "eval_runtime": 76.4531, "eval_samples_per_second": 194.433, "eval_steps_per_second": 194.433, "epoch": 5.0, "step": 2615},
  {"loss": 7.1863, "grad_norm": 6.838705062866211, "learning_rate": 5.544933078393881e-05, "epoch": 5.009560229445507, "step": 2620},
  {"loss": 7.1259, "grad_norm": 6.260281562805176, "learning_rate": 5.502443169747186e-05, "epoch": 5.047801147227533, "step": 2640},
  {"loss": 7.1559, "grad_norm": 6.463006496429443, "learning_rate": 5.459953261100489e-05, "epoch": 5.08604206500956, "step": 2660},
  {"loss": 7.1318, "grad_norm": 6.499185562133789, "learning_rate": 5.4174633524537924e-05, "epoch": 5.124282982791587, "step": 2680},
  {"loss": 7.0993, "grad_norm": 6.508650302886963, "learning_rate": 5.3749734438070964e-05, "epoch": 5.162523900573614, "step": 2700},
  {"loss": 7.0823, "grad_norm": 6.573218822479248, "learning_rate": 5.3324835351604e-05, "epoch": 5.2007648183556405, "step": 2720},
  {"loss": 7.0839, "grad_norm": 6.863697052001953, "learning_rate": 5.289993626513703e-05, "epoch": 5.239005736137667, "step": 2740},
  {"loss": 7.0723, "grad_norm": 6.305070877075195, "learning_rate": 5.247503717867007e-05, "epoch": 5.277246653919694, "step": 2760},
  {"loss": 6.9592, "grad_norm": 6.715279579162598, "learning_rate": 5.20501380922031e-05, "epoch": 5.315487571701721, "step": 2780},
  {"loss": 7.0275, "grad_norm": 6.625701904296875, "learning_rate": 5.1625239005736136e-05, "epoch": 5.353728489483748, "step": 2800},
  {"loss": 6.9146, "grad_norm": 6.717496871948242, "learning_rate": 5.120033991926918e-05, "epoch": 5.3919694072657744, "step": 2820},
  {"loss": 6.9984, "grad_norm": 6.500243186950684, "learning_rate": 5.0775440832802216e-05, "epoch": 5.430210325047801, "step": 2840},
  {"loss": 6.9367, "grad_norm": 6.41347074508667, "learning_rate": 5.035054174633524e-05, "epoch": 5.468451242829828, "step": 2860},
  {"loss": 6.9997, "grad_norm": 6.83429479598999, "learning_rate": 4.992564265986828e-05, "epoch": 5.506692160611855, "step": 2880},
  {"loss": 6.9204, "grad_norm": 6.565597057342529, "learning_rate": 4.950074357340132e-05, "epoch": 5.544933078393882, "step": 2900},
  {"loss": 6.8926, "grad_norm": 6.9456095695495605, "learning_rate": 4.907584448693436e-05, "epoch": 5.583173996175908, "step": 2920},
  {"loss": 6.8993, "grad_norm": 7.052099704742432, "learning_rate": 4.865094540046739e-05, "epoch": 5.621414913957935, "step": 2940},
  {"loss": 6.8474, "grad_norm": 7.128490924835205, "learning_rate": 4.822604631400043e-05, "epoch": 5.659655831739962, "step": 2960},
  {"loss": 6.8509, "grad_norm": 6.792144298553467, "learning_rate": 4.780114722753346e-05, "epoch": 5.6978967495219885, "step": 2980},
  {"loss": 6.9141, "grad_norm": 6.853285312652588, "learning_rate": 4.73762481410665e-05, "epoch": 5.736137667304015, "step": 3000},
  {"loss": 6.7391, "grad_norm": 7.153258800506592, "learning_rate": 4.695134905459953e-05, "epoch": 5.774378585086042, "step": 3020},
  {"loss": 6.7554, "grad_norm": 6.9271321296691895, "learning_rate": 4.6526449968132566e-05, "epoch": 5.812619502868069, "step": 3040},
  {"loss": 6.8172, "grad_norm": 7.218133926391602, "learning_rate": 4.6101550881665606e-05, "epoch": 5.850860420650095, "step": 3060},
  {"loss": 6.8442, "grad_norm": 7.0558695793151855, "learning_rate": 4.5676651795198646e-05, "epoch": 5.8891013384321225, "step": 3080},
  {"loss": 6.696, "grad_norm": 6.762065887451172, "learning_rate": 4.525175270873168e-05, "epoch": 5.927342256214149, "step": 3100},
  {"loss": 6.6763, "grad_norm": 6.8173604011535645, "learning_rate": 4.482685362226471e-05, "epoch": 5.965583173996176, "step": 3120},
  {"eval_loss": 5.7182440757751465, "eval_accuracy": 0.7291624621594349, "eval_runtime": 444.003, "eval_samples_per_second": 33.48, "eval_steps_per_second": 33.48, "epoch": 6.0, "step": 3138},
  {"loss": 6.6927, "grad_norm": 7.1014723777771, "learning_rate": 4.440195453579775e-05, "epoch": 6.003824091778203, "step": 3140},
  {"loss": 6.6538, "grad_norm": 6.958450794219971, "learning_rate": 4.3977055449330785e-05, "epoch": 6.042065009560229, "step": 3160},
  {"loss": 6.5479, "grad_norm": 6.920003890991211, "learning_rate": 4.3552156362863825e-05, "epoch": 6.080305927342256, "step": 3180},
  {"loss": 6.5668, "grad_norm": 7.053244113922119, "learning_rate": 4.312725727639686e-05, "epoch": 6.118546845124283, "step": 3200},
  {"loss": 6.6722, "grad_norm": 6.9157185554504395, "learning_rate": 4.270235818992989e-05, "epoch": 6.15678776290631, "step": 3220},
  {"loss": 6.6397, "grad_norm": 7.149935722351074, "learning_rate": 4.227745910346293e-05, "epoch": 6.195028680688337, "step": 3240},
  {"loss": 6.6041, "grad_norm": 7.318164825439453, "learning_rate": 4.185256001699597e-05, "epoch": 6.233269598470363, "step": 3260},
  {"loss": 6.5492, "grad_norm": 7.044018268585205, "learning_rate": 4.1427660930529e-05, "epoch": 6.27151051625239, "step": 3280},
  {"loss": 6.5679, "grad_norm": 7.045164585113525, "learning_rate": 4.1002761844062037e-05, "epoch": 6.309751434034417, "step": 3300},
  {"loss": 6.5695, "grad_norm": 7.092489242553711, "learning_rate": 4.0577862757595076e-05, "epoch": 6.347992351816444, "step": 3320},
  {"loss": 6.4842, "grad_norm": 6.940147399902344, "learning_rate": 4.015296367112811e-05, "epoch": 6.3862332695984705, "step": 3340},
  {"loss": 6.5317, "grad_norm": 7.10172176361084, "learning_rate": 3.972806458466114e-05, "epoch": 6.424474187380497, "step": 3360},
  {"loss": 6.4702, "grad_norm": 7.129051208496094, "learning_rate": 3.930316549819418e-05, "epoch": 6.462715105162524, "step": 3380},
  {"loss": 6.3999, "grad_norm": 7.501070499420166, "learning_rate": 3.8878266411727215e-05, "epoch": 6.500956022944551, "step": 3400},
  {"loss": 6.4932, "grad_norm": 7.325244426727295, "learning_rate": 3.8453367325260255e-05, "epoch": 6.539196940726577, "step": 3420},
  {"loss": 6.3927, "grad_norm": 7.361093521118164, "learning_rate": 3.802846823879329e-05, "epoch": 6.577437858508604, "step": 3440},
  {"loss": 6.4861, "grad_norm": 7.228673458099365, "learning_rate": 3.760356915232632e-05, "epoch": 6.615678776290631, "step": 3460},
  {"loss": 6.4623, "grad_norm": 7.602611064910889, "learning_rate": 3.717867006585936e-05, "epoch": 6.653919694072657, "step": 3480},
  {"loss": 6.4282, "grad_norm": 7.901960372924805, "learning_rate": 3.6753770979392394e-05, "epoch": 6.692160611854685, "step": 3500},
  {"loss": 6.3799, "grad_norm": 7.1125383377075195, "learning_rate": 3.6328871892925434e-05, "epoch": 6.730401529636711, "step": 3520},
  {"loss": 6.3707, "grad_norm": 7.1385884284973145, "learning_rate": 3.590397280645847e-05, "epoch": 6.768642447418738, "step": 3540},
  {"loss": 6.4388, "grad_norm": 7.548192977905273, "learning_rate": 3.54790737199915e-05, "epoch": 6.806883365200765, "step": 3560},
  {"loss": 6.4223, "grad_norm": 7.492359161376953, "learning_rate": 3.505417463352454e-05, "epoch": 6.845124282982791, "step": 3580},
  {"loss": 6.3552, "grad_norm": 7.575985431671143, "learning_rate": 3.462927554705758e-05, "epoch": 6.8833652007648185, "step": 3600},
  {"loss": 6.3379, "grad_norm": 7.351112365722656, "learning_rate": 3.4204376460590606e-05, "epoch": 6.921606118546845, "step": 3620},
  {"loss": 6.3429, "grad_norm": 7.33430290222168, "learning_rate": 3.3779477374123646e-05, "epoch": 6.959847036328872, "step": 3640},
  {"loss": 6.3112, "grad_norm": 7.511825084686279, "learning_rate": 3.3354578287656686e-05, "epoch": 6.998087954110899, "step": 3660},
  {"eval_loss": 5.265278339385986, "eval_accuracy": 0.7632021527077026, "eval_runtime": 484.395, "eval_samples_per_second": 30.688, "eval_steps_per_second": 30.688, "epoch": 7.0, "step": 3661},
  {"loss": 6.1764, "grad_norm": 7.424711227416992, "learning_rate": 3.292967920118972e-05, "epoch": 7.036328871892925, "step": 3680},
  {"loss": 6.2389, "grad_norm": 7.648799896240234, "learning_rate": 3.250478011472275e-05, "epoch": 7.074569789674952, "step": 3700},
  {"loss": 6.2506, "grad_norm": 7.4450483322143555, "learning_rate": 3.207988102825579e-05, "epoch": 7.112810707456979, "step": 3720},
  {"loss": 6.2049, "grad_norm": 7.422061443328857, "learning_rate": 3.1654981941788825e-05, "epoch": 7.151051625239006, "step": 3740},
  {"loss": 6.2906, "grad_norm": 7.345204830169678, "learning_rate": 3.1230082855321864e-05, "epoch": 7.189292543021033, "step": 3760},
  {"loss": 6.2644, "grad_norm": 7.486473083496094, "learning_rate": 3.08051837688549e-05, "epoch": 7.227533460803059, "step": 3780},
  {"loss": 6.2421, "grad_norm": 7.317290782928467, "learning_rate": 3.0380284682387934e-05, "epoch": 7.265774378585086, "step": 3800},
  {"loss": 6.1406, "grad_norm": 7.4384002685546875, "learning_rate": 2.995538559592097e-05, "epoch": 7.304015296367113, "step": 3820},
  {"loss": 6.2031, "grad_norm": 7.7606000900268555, "learning_rate": 2.9530486509454007e-05, "epoch": 7.342256214149139, "step": 3840},
  {"loss": 6.127, "grad_norm": 7.305050373077393, "learning_rate": 2.910558742298704e-05, "epoch": 7.3804971319311665, "step": 3860},
  {"loss": 6.1474, "grad_norm": 7.713500022888184, "learning_rate": 2.868068833652008e-05, "epoch": 7.418738049713193, "step": 3880},
  {"loss": 6.1542, "grad_norm": 8.028603553771973, "learning_rate": 2.8255789250053116e-05, "epoch": 7.45697896749522, "step": 3900},
  {"loss": 6.225, "grad_norm": 7.4730329513549805, "learning_rate": 2.783089016358615e-05, "epoch": 7.495219885277247, "step": 3920},
  {"loss": 6.1674, "grad_norm": 7.52304220199585, "learning_rate": 2.7405991077119186e-05, "epoch": 7.533460803059273, "step": 3940},
  {"loss": 6.1169, "grad_norm": 7.616427898406982, "learning_rate": 2.6981091990652225e-05, "epoch": 7.5717017208413, "step": 3960},
  {"loss": 6.1041, "grad_norm": 7.784472465515137, "learning_rate": 2.6556192904185255e-05, "epoch": 7.609942638623327, "step": 3980},
  {"loss": 6.1069, "grad_norm": 7.819777011871338, "learning_rate": 2.6131293817718295e-05, "epoch": 7.648183556405353, "step": 4000},
  {"loss": 5.9985, "grad_norm": 7.889120101928711, "learning_rate": 2.5706394731251328e-05, "epoch": 7.686424474187381, "step": 4020},
  {"loss": 6.0437, "grad_norm": 7.858097076416016, "learning_rate": 2.5281495644784364e-05, "epoch": 7.724665391969407, "step": 4040},
  {"loss": 6.1376, "grad_norm": 7.739562511444092, "learning_rate": 2.48565965583174e-05, "epoch": 7.762906309751434, "step": 4060},
  {"loss": 6.2084, "grad_norm": 7.778552532196045, "learning_rate": 2.4431697471850437e-05, "epoch": 7.801147227533461, "step": 4080},
  {"loss": 6.0325, "grad_norm": 7.536991596221924, "learning_rate": 2.4006798385383474e-05, "epoch": 7.839388145315487, "step": 4100},
  {"loss": 6.098, "grad_norm": 7.846856594085693, "learning_rate": 2.3581899298916507e-05, "epoch": 7.8776290630975145, "step": 4120},
  {"loss": 5.9765, "grad_norm": 7.760807991027832, "learning_rate": 2.3157000212449547e-05, "epoch": 7.915869980879541, "step": 4140},
  {"loss": 5.9915, "grad_norm": 7.827345371246338, "learning_rate": 2.273210112598258e-05, "epoch": 7.954110898661568, "step": 4160},
  {"loss": 6.0255, "grad_norm": 8.129748344421387, "learning_rate": 2.2307202039515616e-05, "epoch": 7.992351816443595, "step": 4180},
  {"eval_loss": 4.966301918029785, "eval_accuracy": 0.782643794147326, "eval_runtime": 260.149, "eval_samples_per_second": 57.14, "eval_steps_per_second": 57.14, "epoch": 8.0, "step": 4184},
  {"loss": 6.0763, "grad_norm": 7.686340808868408, "learning_rate": 2.1882302953048652e-05, "epoch": 8.030592734225621, "step": 4200},
  {"loss": 5.868, "grad_norm": 7.666318893432617, "learning_rate": 2.145740386658169e-05, "epoch": 8.068833652007648, "step": 4220},
  {"loss": 5.8964, "grad_norm": 7.686400890350342, "learning_rate": 2.1032504780114722e-05, "epoch": 8.107074569789676, "step": 4240},
  {"loss": 5.8408, "grad_norm": 7.418490886688232, "learning_rate": 2.0607605693647762e-05, "epoch": 8.145315487571702, "step": 4260},
  {"loss": 5.9742, "grad_norm": 7.769067287445068, "learning_rate": 2.0182706607180795e-05, "epoch": 8.183556405353729, "step": 4280},
  {"loss": 5.913, "grad_norm": 7.915468215942383, "learning_rate": 1.975780752071383e-05, "epoch": 8.221797323135755, "step": 4300},
  {"loss": 5.8613, "grad_norm": 7.884761810302734, "learning_rate": 1.9332908434246868e-05, "epoch": 8.260038240917781, "step": 4320},
  {"loss": 5.9791, "grad_norm": 7.765011787414551, "learning_rate": 1.8908009347779904e-05, "epoch": 8.29827915869981, "step": 4340},
  {"loss": 5.9675, "grad_norm": 8.110984802246094, "learning_rate": 1.8483110261312937e-05, "epoch": 8.336520076481836, "step": 4360},
  {"loss": 5.9804, "grad_norm": 8.114306449890137, "learning_rate": 1.8058211174845974e-05, "epoch": 8.374760994263863, "step": 4380},
  {"loss": 5.8832, "grad_norm": 7.981202125549316, "learning_rate": 1.763331208837901e-05, "epoch": 8.413001912045889, "step": 4400},
  {"loss": 5.9301, "grad_norm": 7.628136157989502, "learning_rate": 1.7208413001912046e-05, "epoch": 8.451242829827915, "step": 4420},
  {"loss": 5.8983, "grad_norm": 7.863382816314697, "learning_rate": 1.6783513915445083e-05, "epoch": 8.489483747609942, "step": 4440},
  {"loss": 5.8938, "grad_norm": 7.82211971282959, "learning_rate": 1.635861482897812e-05, "epoch": 8.52772466539197, "step": 4460},
  {"loss": 5.8945, "grad_norm": 8.038976669311523, "learning_rate": 1.5933715742511156e-05, "epoch": 8.565965583173996, "step": 4480},
  {"loss": 5.8895, "grad_norm": 7.884932518005371, "learning_rate": 1.550881665604419e-05, "epoch": 8.604206500956023, "step": 4500},
  {"loss": 5.9617, "grad_norm": 7.975419521331787, "learning_rate": 1.5083917569577227e-05, "epoch": 8.64244741873805, "step": 4520},
  {"loss": 5.8659, "grad_norm": 7.786068916320801, "learning_rate": 1.4659018483110262e-05, "epoch": 8.680688336520076, "step": 4540},
  {"loss": 5.9116, "grad_norm": 8.130301475524902, "learning_rate": 1.4234119396643298e-05, "epoch": 8.718929254302104, "step": 4560},
  {"loss": 5.8536, "grad_norm": 8.042682647705078, "learning_rate": 1.3809220310176335e-05, "epoch": 8.75717017208413, "step": 4580},
  {"loss": 5.9241, "grad_norm": 8.327803611755371, "learning_rate": 1.3384321223709371e-05, "epoch": 8.795411089866157, "step": 4600},
  {"loss": 5.864, "grad_norm": 7.880401134490967, "learning_rate": 1.2959422137242406e-05, "epoch": 8.833652007648183, "step": 4620},
  {"loss": 5.9457, "grad_norm": 7.6825127601623535, "learning_rate": 1.253452305077544e-05, "epoch": 8.87189292543021, "step": 4640},
  {"loss": 5.8329, "grad_norm": 7.971193313598633, "learning_rate": 1.2109623964308479e-05, "epoch": 8.910133843212238, "step": 4660},
  {"loss": 5.8671, "grad_norm": 8.04354476928711, "learning_rate": 1.1684724877841513e-05, "epoch": 8.948374760994264, "step": 4680},
  {"loss": 5.8091, "grad_norm": 7.942180633544922, "learning_rate": 1.125982579137455e-05, "epoch": 8.98661567877629, "step": 4700},
  {"eval_loss": 4.778744220733643, "eval_accuracy": 0.7956945845946855, "eval_runtime": 531.1827, "eval_samples_per_second": 27.985, "eval_steps_per_second": 27.985, "epoch": 9.0, "step": 4707},
  {"loss": 5.7978, "grad_norm": 7.77038049697876, "learning_rate": 1.0834926704907584e-05, "epoch": 9.024856596558317, "step": 4720},
  {"loss": 5.7849, "grad_norm": 7.850288391113281, "learning_rate": 1.0410027618440621e-05, "epoch": 9.063097514340344, "step": 4740},
  {"loss": 5.7891, "grad_norm": 8.032878875732422, "learning_rate": 9.985128531973657e-06, "epoch": 9.101338432122372, "step": 4760},
  {"loss": 5.781, "grad_norm": 7.886658668518066, "learning_rate": 9.560229445506692e-06, "epoch": 9.139579349904398, "step": 4780},
  {"loss": 5.8584, "grad_norm": 7.953343868255615, "learning_rate": 9.135330359039729e-06, "epoch": 9.177820267686425, "step": 4800},
  {"loss": 5.8192, "grad_norm": 7.899537563323975, "learning_rate": 8.710431272572763e-06, "epoch": 9.216061185468451, "step": 4820},
  {"loss": 5.7122, "grad_norm": 8.269824028015137, "learning_rate": 8.2855321861058e-06, "epoch": 9.254302103250478, "step": 4840},
  {"loss": 5.7634, "grad_norm": 7.824770450592041, "learning_rate": 7.860633099638836e-06, "epoch": 9.292543021032504, "step": 4860},
  {"loss": 5.8083, "grad_norm": 7.953860759735107, "learning_rate": 7.435734013171872e-06, "epoch": 9.330783938814532, "step": 4880},
  {"loss": 5.8012, "grad_norm": 8.25514030456543, "learning_rate": 7.010834926704908e-06, "epoch": 9.369024856596559, "step": 4900},
  {"loss": 5.7938, "grad_norm": 8.2761869430542, "learning_rate": 6.585935840237943e-06, "epoch": 9.407265774378585, "step": 4920},
  {"loss": 5.6735, "grad_norm": 7.865163803100586, "learning_rate": 6.161036753770979e-06, "epoch": 9.445506692160611, "step": 4940},
  {"loss": 5.7914, "grad_norm": 8.172937393188477, "learning_rate": 5.736137667304015e-06, "epoch": 9.483747609942638, "step": 4960},
  {"loss": 5.7702, "grad_norm": 8.558911323547363, "learning_rate": 5.311238580837051e-06, "epoch": 9.521988527724666, "step": 4980},
  {"loss": 5.7283, "grad_norm": 8.265515327453613, "learning_rate": 4.886339494370088e-06, "epoch": 9.560229445506693, "step": 5000},
  {"loss": 5.8007, "grad_norm": 8.17795467376709, "learning_rate": 4.461440407903123e-06, "epoch": 9.598470363288719, "step": 5020},
  {"loss": 5.8121, "grad_norm": 8.109586715698242, "learning_rate": 4.036541321436159e-06, "epoch": 9.636711281070745, "step": 5040},
  {"loss": 5.789, "grad_norm": 7.911646842956543, "learning_rate": 3.6116422349691954e-06, "epoch": 9.674952198852772, "step": 5060},
  {"loss": 5.7266, "grad_norm": 8.030941009521484, "learning_rate": 3.186743148502231e-06, "epoch": 9.7131931166348, "step": 5080},
  {"loss": 5.761, "grad_norm": 8.059958457946777, "learning_rate": 2.7618440620352666e-06, "epoch": 9.751434034416826, "step": 5100},
  {"loss": 5.7338, "grad_norm": 8.002403259277344, "learning_rate": 2.3369449755683026e-06, "epoch": 9.789674952198853, "step": 5120},
  {"loss": 5.7088, "grad_norm": 8.306962966918945, "learning_rate": 1.9120458891013386e-06, "epoch": 9.82791586998088, "step": 5140},
  {"loss": 5.6973, "grad_norm": 8.018095970153809, "learning_rate": 1.4871468026343744e-06, "epoch": 9.866156787762906, "step": 5160},
  {"loss": 5.6422, "grad_norm": 8.168917655944824, "learning_rate": 1.0622477161674104e-06, "epoch": 9.904397705544934, "step": 5180},
  {"loss": 5.7399, "grad_norm": 7.939206123352051, "learning_rate": 6.373486297004462e-07, "epoch": 9.94263862332696, "step": 5200},
  {"loss": 5.7269, "grad_norm": 7.970940589904785, "learning_rate": 2.1244954323348205e-07, "epoch": 9.980879541108987, "step": 5220},
  {"eval_loss": 4.700281620025635, "eval_accuracy": 0.8030272452068618, "eval_runtime": 552.5641, "eval_samples_per_second": 26.902, "eval_steps_per_second": 26.902, "epoch": 10.0, "step": 5230},
  {"train_runtime": 28748.4048, "train_samples_per_second": 46.534, "train_steps_per_second": 0.182, "total_flos": 2.49073133395968e+18, "train_loss": 7.888943860726876, "epoch": 10.0, "step": 5230}
]
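
The array above appears to be the log_history written by the Hugging Face Trainer into trainer_state.json: entries carrying "loss" are periodic training logs, entries carrying "eval_loss" are per-epoch evaluations, and the final entry is the end-of-training summary. A minimal sketch for summarizing the trace, assuming the array has been saved to a file named log_history.json (the filename is an illustrative assumption):

import json

# Load the log; "log_history.json" is a hypothetical filename for the array above.
with open("log_history.json") as f:
    history = json.load(f)

# Training entries carry "loss"; per-epoch evaluation entries carry "eval_loss".
train_logs = [e for e in history if "loss" in e]
eval_logs = [e for e in history if "eval_loss" in e]

# Print a per-epoch summary of the evaluation metrics.
for e in eval_logs:
    print(f"epoch {e['epoch']:>4.0f}  step {e['step']:>5}  "
          f"eval_loss {e['eval_loss']:.3f}  eval_accuracy {e['eval_accuracy']:.4f}")

Run against this trace, the summary shows eval_loss falling from about 11.03 to 4.70 and eval_accuracy rising from about 0.181 to 0.803 over the ten epochs.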