smol-encoder-135m/last-checkpoint/trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.02,
"eval_steps": 2000,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 5e-05,
"grad_norm": 31.625,
"learning_rate": 5.000000000000001e-07,
"loss": 11.0902,
"step": 10
},
{
"epoch": 0.0001,
"grad_norm": 46.0,
"learning_rate": 1.0000000000000002e-06,
"loss": 11.0927,
"step": 20
},
{
"epoch": 0.00015,
"grad_norm": 30.25,
"learning_rate": 1.5e-06,
"loss": 11.0934,
"step": 30
},
{
"epoch": 0.0002,
"grad_norm": 50.5,
"learning_rate": 2.0000000000000003e-06,
"loss": 11.0834,
"step": 40
},
{
"epoch": 0.00025,
"grad_norm": 20.25,
"learning_rate": 2.5e-06,
"loss": 11.082,
"step": 50
},
{
"epoch": 0.0003,
"grad_norm": 53.0,
"learning_rate": 3e-06,
"loss": 11.096,
"step": 60
},
{
"epoch": 0.00035,
"grad_norm": 32.75,
"learning_rate": 3.5000000000000004e-06,
"loss": 11.0645,
"step": 70
},
{
"epoch": 0.0004,
"grad_norm": 36.0,
"learning_rate": 4.000000000000001e-06,
"loss": 11.0728,
"step": 80
},
{
"epoch": 0.00045,
"grad_norm": 20.25,
"learning_rate": 4.5e-06,
"loss": 11.0674,
"step": 90
},
{
"epoch": 0.0005,
"grad_norm": 25.125,
"learning_rate": 5e-06,
"loss": 11.0666,
"step": 100
},
{
"epoch": 0.00055,
"grad_norm": 16.75,
"learning_rate": 5.500000000000001e-06,
"loss": 11.0664,
"step": 110
},
{
"epoch": 0.0006,
"grad_norm": 22.125,
"learning_rate": 6e-06,
"loss": 11.0592,
"step": 120
},
{
"epoch": 0.00065,
"grad_norm": 23.625,
"learning_rate": 6.5000000000000004e-06,
"loss": 11.049,
"step": 130
},
{
"epoch": 0.0007,
"grad_norm": 13.3125,
"learning_rate": 7.000000000000001e-06,
"loss": 11.0685,
"step": 140
},
{
"epoch": 0.00075,
"grad_norm": 15.0,
"learning_rate": 7.5e-06,
"loss": 11.0337,
"step": 150
},
{
"epoch": 0.0008,
"grad_norm": 30.125,
"learning_rate": 8.000000000000001e-06,
"loss": 11.0335,
"step": 160
},
{
"epoch": 0.00085,
"grad_norm": 14.75,
"learning_rate": 8.500000000000002e-06,
"loss": 11.0266,
"step": 170
},
{
"epoch": 0.0009,
"grad_norm": 17.375,
"learning_rate": 9e-06,
"loss": 11.0171,
"step": 180
},
{
"epoch": 0.00095,
"grad_norm": 42.0,
"learning_rate": 9.5e-06,
"loss": 11.0091,
"step": 190
},
{
"epoch": 0.001,
"grad_norm": 29.25,
"learning_rate": 1e-05,
"loss": 11.0014,
"step": 200
},
{
"epoch": 0.00105,
"grad_norm": 18.375,
"learning_rate": 1.05e-05,
"loss": 10.993,
"step": 210
},
{
"epoch": 0.0011,
"grad_norm": 40.25,
"learning_rate": 1.1000000000000001e-05,
"loss": 10.973,
"step": 220
},
{
"epoch": 0.00115,
"grad_norm": 39.0,
"learning_rate": 1.1500000000000002e-05,
"loss": 10.9513,
"step": 230
},
{
"epoch": 0.0012,
"grad_norm": 23.375,
"learning_rate": 1.2e-05,
"loss": 10.9342,
"step": 240
},
{
"epoch": 0.00125,
"grad_norm": 33.25,
"learning_rate": 1.25e-05,
"loss": 10.9172,
"step": 250
},
{
"epoch": 0.0013,
"grad_norm": 42.25,
"learning_rate": 1.3000000000000001e-05,
"loss": 10.9073,
"step": 260
},
{
"epoch": 0.00135,
"grad_norm": 10.625,
"learning_rate": 1.3500000000000001e-05,
"loss": 10.8946,
"step": 270
},
{
"epoch": 0.0014,
"grad_norm": 10.6875,
"learning_rate": 1.4000000000000001e-05,
"loss": 10.8823,
"step": 280
},
{
"epoch": 0.00145,
"grad_norm": 63.75,
"learning_rate": 1.45e-05,
"loss": 10.867,
"step": 290
},
{
"epoch": 0.0015,
"grad_norm": 38.5,
"learning_rate": 1.5e-05,
"loss": 10.8479,
"step": 300
},
{
"epoch": 0.00155,
"grad_norm": 17.375,
"learning_rate": 1.55e-05,
"loss": 10.8308,
"step": 310
},
{
"epoch": 0.0016,
"grad_norm": 42.0,
"learning_rate": 1.6000000000000003e-05,
"loss": 10.8119,
"step": 320
},
{
"epoch": 0.00165,
"grad_norm": 22.25,
"learning_rate": 1.65e-05,
"loss": 10.8032,
"step": 330
},
{
"epoch": 0.0017,
"grad_norm": 13.0,
"learning_rate": 1.7000000000000003e-05,
"loss": 10.7896,
"step": 340
},
{
"epoch": 0.00175,
"grad_norm": 32.5,
"learning_rate": 1.75e-05,
"loss": 10.7853,
"step": 350
},
{
"epoch": 0.0018,
"grad_norm": 12.75,
"learning_rate": 1.8e-05,
"loss": 10.7559,
"step": 360
},
{
"epoch": 0.00185,
"grad_norm": 16.75,
"learning_rate": 1.85e-05,
"loss": 10.7292,
"step": 370
},
{
"epoch": 0.0019,
"grad_norm": 13.5,
"learning_rate": 1.9e-05,
"loss": 10.7117,
"step": 380
},
{
"epoch": 0.00195,
"grad_norm": 26.625,
"learning_rate": 1.9500000000000003e-05,
"loss": 10.6682,
"step": 390
},
{
"epoch": 0.002,
"grad_norm": 27.625,
"learning_rate": 2e-05,
"loss": 10.6554,
"step": 400
},
{
"epoch": 0.00205,
"grad_norm": 18.5,
"learning_rate": 2.05e-05,
"loss": 10.6264,
"step": 410
},
{
"epoch": 0.0021,
"grad_norm": 22.375,
"learning_rate": 2.1e-05,
"loss": 10.5963,
"step": 420
},
{
"epoch": 0.00215,
"grad_norm": 16.625,
"learning_rate": 2.15e-05,
"loss": 10.5767,
"step": 430
},
{
"epoch": 0.0022,
"grad_norm": 20.5,
"learning_rate": 2.2000000000000003e-05,
"loss": 10.5003,
"step": 440
},
{
"epoch": 0.00225,
"grad_norm": 128.0,
"learning_rate": 2.25e-05,
"loss": 10.4183,
"step": 450
},
{
"epoch": 0.0023,
"grad_norm": 35.25,
"learning_rate": 2.3000000000000003e-05,
"loss": 10.4039,
"step": 460
},
{
"epoch": 0.00235,
"grad_norm": 46.5,
"learning_rate": 2.35e-05,
"loss": 10.337,
"step": 470
},
{
"epoch": 0.0024,
"grad_norm": 31.75,
"learning_rate": 2.4e-05,
"loss": 10.287,
"step": 480
},
{
"epoch": 0.00245,
"grad_norm": 15.5625,
"learning_rate": 2.45e-05,
"loss": 10.258,
"step": 490
},
{
"epoch": 0.0025,
"grad_norm": 30.375,
"learning_rate": 2.5e-05,
"loss": 10.2216,
"step": 500
},
{
"epoch": 0.00255,
"grad_norm": 14.125,
"learning_rate": 2.5500000000000003e-05,
"loss": 10.1201,
"step": 510
},
{
"epoch": 0.0026,
"grad_norm": 28.5,
"learning_rate": 2.6000000000000002e-05,
"loss": 10.054,
"step": 520
},
{
"epoch": 0.00265,
"grad_norm": 14.0625,
"learning_rate": 2.6500000000000004e-05,
"loss": 9.9927,
"step": 530
},
{
"epoch": 0.0027,
"grad_norm": 10.375,
"learning_rate": 2.7000000000000002e-05,
"loss": 9.8655,
"step": 540
},
{
"epoch": 0.00275,
"grad_norm": 30.5,
"learning_rate": 2.7500000000000004e-05,
"loss": 9.7691,
"step": 550
},
{
"epoch": 0.0028,
"grad_norm": 20.125,
"learning_rate": 2.8000000000000003e-05,
"loss": 9.612,
"step": 560
},
{
"epoch": 0.00285,
"grad_norm": 12.0625,
"learning_rate": 2.8499999999999998e-05,
"loss": 9.4771,
"step": 570
},
{
"epoch": 0.0029,
"grad_norm": 37.75,
"learning_rate": 2.9e-05,
"loss": 9.3691,
"step": 580
},
{
"epoch": 0.00295,
"grad_norm": 9.5625,
"learning_rate": 2.95e-05,
"loss": 9.1528,
"step": 590
},
{
"epoch": 0.003,
"grad_norm": 62.0,
"learning_rate": 3e-05,
"loss": 9.0495,
"step": 600
},
{
"epoch": 0.00305,
"grad_norm": 25.125,
"learning_rate": 3.05e-05,
"loss": 8.8528,
"step": 610
},
{
"epoch": 0.0031,
"grad_norm": 32.75,
"learning_rate": 3.1e-05,
"loss": 8.7636,
"step": 620
},
{
"epoch": 0.00315,
"grad_norm": 25.75,
"learning_rate": 3.15e-05,
"loss": 8.6855,
"step": 630
},
{
"epoch": 0.0032,
"grad_norm": 18.625,
"learning_rate": 3.2000000000000005e-05,
"loss": 8.615,
"step": 640
},
{
"epoch": 0.00325,
"grad_norm": 20.5,
"learning_rate": 3.2500000000000004e-05,
"loss": 8.5321,
"step": 650
},
{
"epoch": 0.0033,
"grad_norm": 188.0,
"learning_rate": 3.3e-05,
"loss": 8.4743,
"step": 660
},
{
"epoch": 0.00335,
"grad_norm": 20.5,
"learning_rate": 3.35e-05,
"loss": 8.4076,
"step": 670
},
{
"epoch": 0.0034,
"grad_norm": 58.25,
"learning_rate": 3.4000000000000007e-05,
"loss": 8.3754,
"step": 680
},
{
"epoch": 0.00345,
"grad_norm": 26.5,
"learning_rate": 3.45e-05,
"loss": 8.3274,
"step": 690
},
{
"epoch": 0.0035,
"grad_norm": 11.875,
"learning_rate": 3.5e-05,
"loss": 8.2515,
"step": 700
},
{
"epoch": 0.00355,
"grad_norm": 39.5,
"learning_rate": 3.55e-05,
"loss": 8.2337,
"step": 710
},
{
"epoch": 0.0036,
"grad_norm": 30.625,
"learning_rate": 3.6e-05,
"loss": 8.1032,
"step": 720
},
{
"epoch": 0.00365,
"grad_norm": 9.5,
"learning_rate": 3.65e-05,
"loss": 8.0804,
"step": 730
},
{
"epoch": 0.0037,
"grad_norm": 24.125,
"learning_rate": 3.7e-05,
"loss": 8.0145,
"step": 740
},
{
"epoch": 0.00375,
"grad_norm": 11.375,
"learning_rate": 3.7500000000000003e-05,
"loss": 7.9738,
"step": 750
},
{
"epoch": 0.0038,
"grad_norm": 76.0,
"learning_rate": 3.8e-05,
"loss": 7.943,
"step": 760
},
{
"epoch": 0.00385,
"grad_norm": 28.625,
"learning_rate": 3.85e-05,
"loss": 7.9069,
"step": 770
},
{
"epoch": 0.0039,
"grad_norm": 35.75,
"learning_rate": 3.9000000000000006e-05,
"loss": 7.8638,
"step": 780
},
{
"epoch": 0.00395,
"grad_norm": 26.25,
"learning_rate": 3.9500000000000005e-05,
"loss": 7.8659,
"step": 790
},
{
"epoch": 0.004,
"grad_norm": 64.0,
"learning_rate": 4e-05,
"loss": 7.8315,
"step": 800
},
{
"epoch": 0.00405,
"grad_norm": 11.0625,
"learning_rate": 4.05e-05,
"loss": 7.8273,
"step": 810
},
{
"epoch": 0.0041,
"grad_norm": 18.875,
"learning_rate": 4.1e-05,
"loss": 7.7964,
"step": 820
},
{
"epoch": 0.00415,
"grad_norm": 30.375,
"learning_rate": 4.15e-05,
"loss": 7.7627,
"step": 830
},
{
"epoch": 0.0042,
"grad_norm": 12.1875,
"learning_rate": 4.2e-05,
"loss": 7.7935,
"step": 840
},
{
"epoch": 0.00425,
"grad_norm": 27.25,
"learning_rate": 4.25e-05,
"loss": 7.7644,
"step": 850
},
{
"epoch": 0.0043,
"grad_norm": 12.0625,
"learning_rate": 4.3e-05,
"loss": 7.7387,
"step": 860
},
{
"epoch": 0.00435,
"grad_norm": 24.25,
"learning_rate": 4.35e-05,
"loss": 7.7656,
"step": 870
},
{
"epoch": 0.0044,
"grad_norm": 75.0,
"learning_rate": 4.4000000000000006e-05,
"loss": 7.7289,
"step": 880
},
{
"epoch": 0.00445,
"grad_norm": 49.25,
"learning_rate": 4.4500000000000004e-05,
"loss": 7.7205,
"step": 890
},
{
"epoch": 0.0045,
"grad_norm": 7.4375,
"learning_rate": 4.5e-05,
"loss": 7.721,
"step": 900
},
{
"epoch": 0.00455,
"grad_norm": 23.875,
"learning_rate": 4.55e-05,
"loss": 7.6778,
"step": 910
},
{
"epoch": 0.0046,
"grad_norm": 27.125,
"learning_rate": 4.600000000000001e-05,
"loss": 7.6931,
"step": 920
},
{
"epoch": 0.00465,
"grad_norm": 3.5625,
"learning_rate": 4.6500000000000005e-05,
"loss": 7.6755,
"step": 930
},
{
"epoch": 0.0047,
"grad_norm": 12.1875,
"learning_rate": 4.7e-05,
"loss": 7.6832,
"step": 940
},
{
"epoch": 0.00475,
"grad_norm": 76.5,
"learning_rate": 4.75e-05,
"loss": 7.6759,
"step": 950
},
{
"epoch": 0.0048,
"grad_norm": 6.125,
"learning_rate": 4.8e-05,
"loss": 7.6717,
"step": 960
},
{
"epoch": 0.00485,
"grad_norm": 82.0,
"learning_rate": 4.85e-05,
"loss": 7.6602,
"step": 970
},
{
"epoch": 0.0049,
"grad_norm": 107.5,
"learning_rate": 4.9e-05,
"loss": 7.638,
"step": 980
},
{
"epoch": 0.00495,
"grad_norm": 14.1875,
"learning_rate": 4.9500000000000004e-05,
"loss": 7.6483,
"step": 990
},
{
"epoch": 0.005,
"grad_norm": 13.5,
"learning_rate": 5e-05,
"loss": 7.6629,
"step": 1000
},
{
"epoch": 0.00505,
"grad_norm": 15.4375,
"learning_rate": 4.9997487437185933e-05,
"loss": 7.6448,
"step": 1010
},
{
"epoch": 0.0051,
"grad_norm": 26.125,
"learning_rate": 4.9994974874371864e-05,
"loss": 7.6476,
"step": 1020
},
{
"epoch": 0.00515,
"grad_norm": 12.3125,
"learning_rate": 4.999246231155779e-05,
"loss": 7.6406,
"step": 1030
},
{
"epoch": 0.0052,
"grad_norm": 58.5,
"learning_rate": 4.998994974874372e-05,
"loss": 7.6537,
"step": 1040
},
{
"epoch": 0.00525,
"grad_norm": 31.5,
"learning_rate": 4.998743718592965e-05,
"loss": 7.6293,
"step": 1050
},
{
"epoch": 0.0053,
"grad_norm": 25.5,
"learning_rate": 4.998492462311558e-05,
"loss": 7.6318,
"step": 1060
},
{
"epoch": 0.00535,
"grad_norm": 8.5625,
"learning_rate": 4.9982412060301506e-05,
"loss": 7.6348,
"step": 1070
},
{
"epoch": 0.0054,
"grad_norm": 43.25,
"learning_rate": 4.997989949748744e-05,
"loss": 7.6476,
"step": 1080
},
{
"epoch": 0.00545,
"grad_norm": 30.75,
"learning_rate": 4.997738693467337e-05,
"loss": 7.6541,
"step": 1090
},
{
"epoch": 0.0055,
"grad_norm": 3.796875,
"learning_rate": 4.99748743718593e-05,
"loss": 7.599,
"step": 1100
},
{
"epoch": 0.00555,
"grad_norm": 3.0,
"learning_rate": 4.997236180904522e-05,
"loss": 7.6384,
"step": 1110
},
{
"epoch": 0.0056,
"grad_norm": 26.125,
"learning_rate": 4.996984924623116e-05,
"loss": 7.6353,
"step": 1120
},
{
"epoch": 0.00565,
"grad_norm": 11.375,
"learning_rate": 4.9967336683417085e-05,
"loss": 7.6023,
"step": 1130
},
{
"epoch": 0.0057,
"grad_norm": 33.5,
"learning_rate": 4.9964824120603016e-05,
"loss": 7.6397,
"step": 1140
},
{
"epoch": 0.00575,
"grad_norm": 2.734375,
"learning_rate": 4.996231155778895e-05,
"loss": 7.6122,
"step": 1150
},
{
"epoch": 0.0058,
"grad_norm": 11.75,
"learning_rate": 4.995979899497488e-05,
"loss": 7.6448,
"step": 1160
},
{
"epoch": 0.00585,
"grad_norm": 3.65625,
"learning_rate": 4.99572864321608e-05,
"loss": 7.6119,
"step": 1170
},
{
"epoch": 0.0059,
"grad_norm": 101.5,
"learning_rate": 4.995477386934674e-05,
"loss": 7.6335,
"step": 1180
},
{
"epoch": 0.00595,
"grad_norm": 46.0,
"learning_rate": 4.9952261306532665e-05,
"loss": 7.6137,
"step": 1190
},
{
"epoch": 0.006,
"grad_norm": 3.828125,
"learning_rate": 4.9949748743718596e-05,
"loss": 7.6085,
"step": 1200
},
{
"epoch": 0.00605,
"grad_norm": 26.125,
"learning_rate": 4.994723618090453e-05,
"loss": 7.6161,
"step": 1210
},
{
"epoch": 0.0061,
"grad_norm": 12.125,
"learning_rate": 4.994472361809046e-05,
"loss": 7.6085,
"step": 1220
},
{
"epoch": 0.00615,
"grad_norm": 5.40625,
"learning_rate": 4.994221105527638e-05,
"loss": 7.6353,
"step": 1230
},
{
"epoch": 0.0062,
"grad_norm": 2.46875,
"learning_rate": 4.993969849246231e-05,
"loss": 7.6219,
"step": 1240
},
{
"epoch": 0.00625,
"grad_norm": 5.375,
"learning_rate": 4.9937185929648244e-05,
"loss": 7.6237,
"step": 1250
},
{
"epoch": 0.0063,
"grad_norm": 45.5,
"learning_rate": 4.9934673366834175e-05,
"loss": 7.6058,
"step": 1260
},
{
"epoch": 0.00635,
"grad_norm": 70.0,
"learning_rate": 4.99321608040201e-05,
"loss": 7.5939,
"step": 1270
},
{
"epoch": 0.0064,
"grad_norm": 102.5,
"learning_rate": 4.992964824120604e-05,
"loss": 7.6313,
"step": 1280
},
{
"epoch": 0.00645,
"grad_norm": 31.0,
"learning_rate": 4.992713567839196e-05,
"loss": 7.5911,
"step": 1290
},
{
"epoch": 0.0065,
"grad_norm": 34.0,
"learning_rate": 4.992462311557789e-05,
"loss": 7.6197,
"step": 1300
},
{
"epoch": 0.00655,
"grad_norm": 3.78125,
"learning_rate": 4.992211055276382e-05,
"loss": 7.5896,
"step": 1310
},
{
"epoch": 0.0066,
"grad_norm": 9.5,
"learning_rate": 4.9919597989949754e-05,
"loss": 7.5665,
"step": 1320
},
{
"epoch": 0.00665,
"grad_norm": 13.3125,
"learning_rate": 4.991708542713568e-05,
"loss": 7.6148,
"step": 1330
},
{
"epoch": 0.0067,
"grad_norm": 3.796875,
"learning_rate": 4.991457286432161e-05,
"loss": 7.5974,
"step": 1340
},
{
"epoch": 0.00675,
"grad_norm": 13.9375,
"learning_rate": 4.991206030150754e-05,
"loss": 7.6231,
"step": 1350
},
{
"epoch": 0.0068,
"grad_norm": 4.9375,
"learning_rate": 4.990954773869347e-05,
"loss": 7.573,
"step": 1360
},
{
"epoch": 0.00685,
"grad_norm": 27.375,
"learning_rate": 4.99070351758794e-05,
"loss": 7.6299,
"step": 1370
},
{
"epoch": 0.0069,
"grad_norm": 9.6875,
"learning_rate": 4.990452261306533e-05,
"loss": 7.6229,
"step": 1380
},
{
"epoch": 0.00695,
"grad_norm": 4.28125,
"learning_rate": 4.990201005025126e-05,
"loss": 7.6073,
"step": 1390
},
{
"epoch": 0.007,
"grad_norm": 53.0,
"learning_rate": 4.989949748743719e-05,
"loss": 7.6114,
"step": 1400
},
{
"epoch": 0.00705,
"grad_norm": 7.15625,
"learning_rate": 4.989698492462312e-05,
"loss": 7.5919,
"step": 1410
},
{
"epoch": 0.0071,
"grad_norm": 22.75,
"learning_rate": 4.9894472361809044e-05,
"loss": 7.6078,
"step": 1420
},
{
"epoch": 0.00715,
"grad_norm": 1.6640625,
"learning_rate": 4.9891959798994975e-05,
"loss": 7.5975,
"step": 1430
},
{
"epoch": 0.0072,
"grad_norm": 5.375,
"learning_rate": 4.9889447236180906e-05,
"loss": 7.5817,
"step": 1440
},
{
"epoch": 0.00725,
"grad_norm": 25.625,
"learning_rate": 4.988693467336684e-05,
"loss": 7.6091,
"step": 1450
},
{
"epoch": 0.0073,
"grad_norm": 5.5625,
"learning_rate": 4.988442211055276e-05,
"loss": 7.5941,
"step": 1460
},
{
"epoch": 0.00735,
"grad_norm": 7.4375,
"learning_rate": 4.98819095477387e-05,
"loss": 7.6132,
"step": 1470
},
{
"epoch": 0.0074,
"grad_norm": 6.1875,
"learning_rate": 4.9879396984924623e-05,
"loss": 7.5393,
"step": 1480
},
{
"epoch": 0.00745,
"grad_norm": 1.5390625,
"learning_rate": 4.9876884422110554e-05,
"loss": 7.5867,
"step": 1490
},
{
"epoch": 0.0075,
"grad_norm": 16.375,
"learning_rate": 4.9874371859296486e-05,
"loss": 7.581,
"step": 1500
},
{
"epoch": 0.00755,
"grad_norm": 17.125,
"learning_rate": 4.9871859296482417e-05,
"loss": 7.6056,
"step": 1510
},
{
"epoch": 0.0076,
"grad_norm": 41.0,
"learning_rate": 4.986934673366834e-05,
"loss": 7.6051,
"step": 1520
},
{
"epoch": 0.00765,
"grad_norm": 2.84375,
"learning_rate": 4.986683417085428e-05,
"loss": 7.6122,
"step": 1530
},
{
"epoch": 0.0077,
"grad_norm": 1.9296875,
"learning_rate": 4.98643216080402e-05,
"loss": 7.5703,
"step": 1540
},
{
"epoch": 0.00775,
"grad_norm": 8.5,
"learning_rate": 4.9861809045226134e-05,
"loss": 7.6179,
"step": 1550
},
{
"epoch": 0.0078,
"grad_norm": 28.875,
"learning_rate": 4.985929648241206e-05,
"loss": 7.5747,
"step": 1560
},
{
"epoch": 0.00785,
"grad_norm": 13.25,
"learning_rate": 4.9856783919597996e-05,
"loss": 7.5935,
"step": 1570
},
{
"epoch": 0.0079,
"grad_norm": 13.75,
"learning_rate": 4.985427135678392e-05,
"loss": 7.6138,
"step": 1580
},
{
"epoch": 0.00795,
"grad_norm": 3.703125,
"learning_rate": 4.985175879396985e-05,
"loss": 7.5876,
"step": 1590
},
{
"epoch": 0.008,
"grad_norm": 4.4375,
"learning_rate": 4.984924623115578e-05,
"loss": 7.5742,
"step": 1600
},
{
"epoch": 0.00805,
"grad_norm": 3.28125,
"learning_rate": 4.984673366834171e-05,
"loss": 7.5962,
"step": 1610
},
{
"epoch": 0.0081,
"grad_norm": 4.78125,
"learning_rate": 4.984422110552764e-05,
"loss": 7.5845,
"step": 1620
},
{
"epoch": 0.00815,
"grad_norm": 2.109375,
"learning_rate": 4.9841708542713575e-05,
"loss": 7.5543,
"step": 1630
},
{
"epoch": 0.0082,
"grad_norm": 6.25,
"learning_rate": 4.98391959798995e-05,
"loss": 7.62,
"step": 1640
},
{
"epoch": 0.00825,
"grad_norm": 4.9375,
"learning_rate": 4.983668341708543e-05,
"loss": 7.5663,
"step": 1650
},
{
"epoch": 0.0083,
"grad_norm": 2.0625,
"learning_rate": 4.983417085427136e-05,
"loss": 7.614,
"step": 1660
},
{
"epoch": 0.00835,
"grad_norm": 9.5,
"learning_rate": 4.983165829145729e-05,
"loss": 7.552,
"step": 1670
},
{
"epoch": 0.0084,
"grad_norm": 13.9375,
"learning_rate": 4.982914572864322e-05,
"loss": 7.5656,
"step": 1680
},
{
"epoch": 0.00845,
"grad_norm": 11.4375,
"learning_rate": 4.982663316582915e-05,
"loss": 7.6072,
"step": 1690
},
{
"epoch": 0.0085,
"grad_norm": 17.5,
"learning_rate": 4.982412060301508e-05,
"loss": 7.5712,
"step": 1700
},
{
"epoch": 0.00855,
"grad_norm": 50.25,
"learning_rate": 4.982160804020101e-05,
"loss": 7.5918,
"step": 1710
},
{
"epoch": 0.0086,
"grad_norm": 1.9296875,
"learning_rate": 4.9819095477386934e-05,
"loss": 7.5785,
"step": 1720
},
{
"epoch": 0.00865,
"grad_norm": 5.40625,
"learning_rate": 4.9816582914572865e-05,
"loss": 7.5781,
"step": 1730
},
{
"epoch": 0.0087,
"grad_norm": 1.4921875,
"learning_rate": 4.9814070351758796e-05,
"loss": 7.5902,
"step": 1740
},
{
"epoch": 0.00875,
"grad_norm": 7.8125,
"learning_rate": 4.981155778894473e-05,
"loss": 7.6011,
"step": 1750
},
{
"epoch": 0.0088,
"grad_norm": 17.25,
"learning_rate": 4.980904522613066e-05,
"loss": 7.5952,
"step": 1760
},
{
"epoch": 0.00885,
"grad_norm": 7.6875,
"learning_rate": 4.980653266331658e-05,
"loss": 7.588,
"step": 1770
},
{
"epoch": 0.0089,
"grad_norm": 12.1875,
"learning_rate": 4.980402010050251e-05,
"loss": 7.5903,
"step": 1780
},
{
"epoch": 0.00895,
"grad_norm": 31.875,
"learning_rate": 4.9801507537688444e-05,
"loss": 7.5423,
"step": 1790
},
{
"epoch": 0.009,
"grad_norm": 7.46875,
"learning_rate": 4.9798994974874375e-05,
"loss": 7.5941,
"step": 1800
},
{
"epoch": 0.00905,
"grad_norm": 18.25,
"learning_rate": 4.97964824120603e-05,
"loss": 7.6326,
"step": 1810
},
{
"epoch": 0.0091,
"grad_norm": 4.0,
"learning_rate": 4.979396984924624e-05,
"loss": 7.5826,
"step": 1820
},
{
"epoch": 0.00915,
"grad_norm": 2.296875,
"learning_rate": 4.979145728643216e-05,
"loss": 7.5959,
"step": 1830
},
{
"epoch": 0.0092,
"grad_norm": 18.0,
"learning_rate": 4.978894472361809e-05,
"loss": 7.6069,
"step": 1840
},
{
"epoch": 0.00925,
"grad_norm": 11.1875,
"learning_rate": 4.9786432160804024e-05,
"loss": 7.6083,
"step": 1850
},
{
"epoch": 0.0093,
"grad_norm": 17.25,
"learning_rate": 4.9783919597989955e-05,
"loss": 7.5944,
"step": 1860
},
{
"epoch": 0.00935,
"grad_norm": 6.59375,
"learning_rate": 4.978140703517588e-05,
"loss": 7.5846,
"step": 1870
},
{
"epoch": 0.0094,
"grad_norm": 16.75,
"learning_rate": 4.977889447236181e-05,
"loss": 7.6238,
"step": 1880
},
{
"epoch": 0.00945,
"grad_norm": 2.890625,
"learning_rate": 4.977638190954774e-05,
"loss": 7.5763,
"step": 1890
},
{
"epoch": 0.0095,
"grad_norm": 13.0625,
"learning_rate": 4.977386934673367e-05,
"loss": 7.5709,
"step": 1900
},
{
"epoch": 0.00955,
"grad_norm": 23.25,
"learning_rate": 4.9771356783919596e-05,
"loss": 7.5941,
"step": 1910
},
{
"epoch": 0.0096,
"grad_norm": 27.5,
"learning_rate": 4.9768844221105534e-05,
"loss": 7.5912,
"step": 1920
},
{
"epoch": 0.00965,
"grad_norm": 21.25,
"learning_rate": 4.976633165829146e-05,
"loss": 7.6247,
"step": 1930
},
{
"epoch": 0.0097,
"grad_norm": 13.125,
"learning_rate": 4.976381909547739e-05,
"loss": 7.5757,
"step": 1940
},
{
"epoch": 0.00975,
"grad_norm": 5.1875,
"learning_rate": 4.976130653266332e-05,
"loss": 7.5845,
"step": 1950
},
{
"epoch": 0.0098,
"grad_norm": 56.25,
"learning_rate": 4.975879396984925e-05,
"loss": 7.5584,
"step": 1960
},
{
"epoch": 0.00985,
"grad_norm": 1.390625,
"learning_rate": 4.9756281407035176e-05,
"loss": 7.5904,
"step": 1970
},
{
"epoch": 0.0099,
"grad_norm": 2.34375,
"learning_rate": 4.975376884422111e-05,
"loss": 7.5794,
"step": 1980
},
{
"epoch": 0.00995,
"grad_norm": 71.0,
"learning_rate": 4.975125628140704e-05,
"loss": 7.5853,
"step": 1990
},
{
"epoch": 0.01,
"grad_norm": 26.75,
"learning_rate": 4.974874371859297e-05,
"loss": 7.591,
"step": 2000
},
{
"epoch": 0.01,
"eval_loss": 7.591722011566162,
"eval_runtime": 87.3606,
"eval_samples_per_second": 28.617,
"eval_steps_per_second": 0.458,
"step": 2000
},
{
"epoch": 0.01005,
"grad_norm": 4.40625,
"learning_rate": 4.97462311557789e-05,
"loss": 7.5937,
"step": 2010
},
{
"epoch": 0.0101,
"grad_norm": 42.0,
"learning_rate": 4.974371859296483e-05,
"loss": 7.6135,
"step": 2020
},
{
"epoch": 0.01015,
"grad_norm": 2.25,
"learning_rate": 4.9741206030150755e-05,
"loss": 7.6028,
"step": 2030
},
{
"epoch": 0.0102,
"grad_norm": 4.4375,
"learning_rate": 4.9738693467336686e-05,
"loss": 7.593,
"step": 2040
},
{
"epoch": 0.01025,
"grad_norm": 3.921875,
"learning_rate": 4.973618090452262e-05,
"loss": 7.6155,
"step": 2050
},
{
"epoch": 0.0103,
"grad_norm": 9.8125,
"learning_rate": 4.973366834170855e-05,
"loss": 7.5792,
"step": 2060
},
{
"epoch": 0.01035,
"grad_norm": 1.78125,
"learning_rate": 4.973115577889447e-05,
"loss": 7.6247,
"step": 2070
},
{
"epoch": 0.0104,
"grad_norm": 2.6875,
"learning_rate": 4.97286432160804e-05,
"loss": 7.5797,
"step": 2080
},
{
"epoch": 0.01045,
"grad_norm": 33.75,
"learning_rate": 4.9726130653266334e-05,
"loss": 7.6301,
"step": 2090
},
{
"epoch": 0.0105,
"grad_norm": 4.40625,
"learning_rate": 4.9723618090452265e-05,
"loss": 7.5729,
"step": 2100
},
{
"epoch": 0.01055,
"grad_norm": 16.75,
"learning_rate": 4.9721105527638196e-05,
"loss": 7.5846,
"step": 2110
},
{
"epoch": 0.0106,
"grad_norm": 78.0,
"learning_rate": 4.971859296482412e-05,
"loss": 7.5739,
"step": 2120
},
{
"epoch": 0.01065,
"grad_norm": 3.125,
"learning_rate": 4.971608040201005e-05,
"loss": 7.5262,
"step": 2130
},
{
"epoch": 0.0107,
"grad_norm": 1.84375,
"learning_rate": 4.971356783919598e-05,
"loss": 7.614,
"step": 2140
},
{
"epoch": 0.01075,
"grad_norm": 60.5,
"learning_rate": 4.9711055276381914e-05,
"loss": 7.5534,
"step": 2150
},
{
"epoch": 0.0108,
"grad_norm": 2.03125,
"learning_rate": 4.970854271356784e-05,
"loss": 7.5945,
"step": 2160
},
{
"epoch": 0.01085,
"grad_norm": 180.0,
"learning_rate": 4.970603015075377e-05,
"loss": 7.5861,
"step": 2170
},
{
"epoch": 0.0109,
"grad_norm": 66.0,
"learning_rate": 4.97035175879397e-05,
"loss": 7.5985,
"step": 2180
},
{
"epoch": 0.01095,
"grad_norm": 2.875,
"learning_rate": 4.970100502512563e-05,
"loss": 7.601,
"step": 2190
},
{
"epoch": 0.011,
"grad_norm": 4.0625,
"learning_rate": 4.9698492462311555e-05,
"loss": 7.5665,
"step": 2200
},
{
"epoch": 0.01105,
"grad_norm": 10.1875,
"learning_rate": 4.969597989949749e-05,
"loss": 7.5843,
"step": 2210
},
{
"epoch": 0.0111,
"grad_norm": 26.5,
"learning_rate": 4.969346733668342e-05,
"loss": 7.5829,
"step": 2220
},
{
"epoch": 0.01115,
"grad_norm": 4.875,
"learning_rate": 4.969095477386935e-05,
"loss": 7.6214,
"step": 2230
},
{
"epoch": 0.0112,
"grad_norm": 27.375,
"learning_rate": 4.968844221105528e-05,
"loss": 7.5561,
"step": 2240
},
{
"epoch": 0.01125,
"grad_norm": 58.75,
"learning_rate": 4.968592964824121e-05,
"loss": 7.5848,
"step": 2250
},
{
"epoch": 0.0113,
"grad_norm": 29.5,
"learning_rate": 4.9683417085427134e-05,
"loss": 7.5941,
"step": 2260
},
{
"epoch": 0.01135,
"grad_norm": 1.40625,
"learning_rate": 4.968090452261307e-05,
"loss": 7.5891,
"step": 2270
},
{
"epoch": 0.0114,
"grad_norm": 25.0,
"learning_rate": 4.9678391959798996e-05,
"loss": 7.5454,
"step": 2280
},
{
"epoch": 0.01145,
"grad_norm": 1.265625,
"learning_rate": 4.967587939698493e-05,
"loss": 7.5377,
"step": 2290
},
{
"epoch": 0.0115,
"grad_norm": 18.625,
"learning_rate": 4.967336683417086e-05,
"loss": 7.5584,
"step": 2300
},
{
"epoch": 0.01155,
"grad_norm": 25.375,
"learning_rate": 4.967085427135679e-05,
"loss": 7.5637,
"step": 2310
},
{
"epoch": 0.0116,
"grad_norm": 34.75,
"learning_rate": 4.9668341708542714e-05,
"loss": 7.5909,
"step": 2320
},
{
"epoch": 0.01165,
"grad_norm": 2.171875,
"learning_rate": 4.9665829145728645e-05,
"loss": 7.6102,
"step": 2330
},
{
"epoch": 0.0117,
"grad_norm": 10.4375,
"learning_rate": 4.9663316582914576e-05,
"loss": 7.5496,
"step": 2340
},
{
"epoch": 0.01175,
"grad_norm": 1.2890625,
"learning_rate": 4.966080402010051e-05,
"loss": 7.5872,
"step": 2350
},
{
"epoch": 0.0118,
"grad_norm": 7.0,
"learning_rate": 4.965829145728643e-05,
"loss": 7.5669,
"step": 2360
},
{
"epoch": 0.01185,
"grad_norm": 1.71875,
"learning_rate": 4.965577889447237e-05,
"loss": 7.5733,
"step": 2370
},
{
"epoch": 0.0119,
"grad_norm": 3.453125,
"learning_rate": 4.965326633165829e-05,
"loss": 7.5912,
"step": 2380
},
{
"epoch": 0.01195,
"grad_norm": 4.21875,
"learning_rate": 4.9650753768844224e-05,
"loss": 7.5514,
"step": 2390
},
{
"epoch": 0.012,
"grad_norm": 2.5,
"learning_rate": 4.9648241206030155e-05,
"loss": 7.5824,
"step": 2400
},
{
"epoch": 0.01205,
"grad_norm": 20.375,
"learning_rate": 4.9645728643216086e-05,
"loss": 7.5583,
"step": 2410
},
{
"epoch": 0.0121,
"grad_norm": 9.0625,
"learning_rate": 4.964321608040201e-05,
"loss": 7.5876,
"step": 2420
},
{
"epoch": 0.01215,
"grad_norm": 14.875,
"learning_rate": 4.964070351758794e-05,
"loss": 7.5762,
"step": 2430
},
{
"epoch": 0.0122,
"grad_norm": 21.375,
"learning_rate": 4.963819095477387e-05,
"loss": 7.6275,
"step": 2440
},
{
"epoch": 0.01225,
"grad_norm": 19.625,
"learning_rate": 4.9635678391959803e-05,
"loss": 7.5706,
"step": 2450
},
{
"epoch": 0.0123,
"grad_norm": 3.4375,
"learning_rate": 4.9633165829145734e-05,
"loss": 7.5645,
"step": 2460
},
{
"epoch": 0.01235,
"grad_norm": 7.53125,
"learning_rate": 4.963065326633166e-05,
"loss": 7.5835,
"step": 2470
},
{
"epoch": 0.0124,
"grad_norm": 8.75,
"learning_rate": 4.962814070351759e-05,
"loss": 7.5419,
"step": 2480
},
{
"epoch": 0.01245,
"grad_norm": 2.625,
"learning_rate": 4.9625628140703514e-05,
"loss": 7.5884,
"step": 2490
},
{
"epoch": 0.0125,
"grad_norm": 2.4375,
"learning_rate": 4.962311557788945e-05,
"loss": 7.5708,
"step": 2500
},
{
"epoch": 0.01255,
"grad_norm": 3.5,
"learning_rate": 4.9620603015075376e-05,
"loss": 7.5626,
"step": 2510
},
{
"epoch": 0.0126,
"grad_norm": 23.375,
"learning_rate": 4.961809045226131e-05,
"loss": 7.5763,
"step": 2520
},
{
"epoch": 0.01265,
"grad_norm": 3.765625,
"learning_rate": 4.961557788944724e-05,
"loss": 7.556,
"step": 2530
},
{
"epoch": 0.0127,
"grad_norm": 1.875,
"learning_rate": 4.961306532663317e-05,
"loss": 7.5764,
"step": 2540
},
{
"epoch": 0.01275,
"grad_norm": 21.5,
"learning_rate": 4.961055276381909e-05,
"loss": 7.5888,
"step": 2550
},
{
"epoch": 0.0128,
"grad_norm": 3.0625,
"learning_rate": 4.960804020100503e-05,
"loss": 7.5852,
"step": 2560
},
{
"epoch": 0.01285,
"grad_norm": 10.625,
"learning_rate": 4.9605527638190955e-05,
"loss": 7.5849,
"step": 2570
},
{
"epoch": 0.0129,
"grad_norm": 4.1875,
"learning_rate": 4.9603015075376886e-05,
"loss": 7.5777,
"step": 2580
},
{
"epoch": 0.01295,
"grad_norm": 16.75,
"learning_rate": 4.960050251256282e-05,
"loss": 7.5524,
"step": 2590
},
{
"epoch": 0.013,
"grad_norm": 15.25,
"learning_rate": 4.959798994974875e-05,
"loss": 7.5561,
"step": 2600
},
{
"epoch": 0.01305,
"grad_norm": 5.40625,
"learning_rate": 4.959547738693467e-05,
"loss": 7.5688,
"step": 2610
},
{
"epoch": 0.0131,
"grad_norm": 1.515625,
"learning_rate": 4.959296482412061e-05,
"loss": 7.6088,
"step": 2620
},
{
"epoch": 0.01315,
"grad_norm": 5.90625,
"learning_rate": 4.9590452261306535e-05,
"loss": 7.5727,
"step": 2630
},
{
"epoch": 0.0132,
"grad_norm": 1.421875,
"learning_rate": 4.9587939698492466e-05,
"loss": 7.5454,
"step": 2640
},
{
"epoch": 0.01325,
"grad_norm": 1.53125,
"learning_rate": 4.958542713567839e-05,
"loss": 7.5715,
"step": 2650
},
{
"epoch": 0.0133,
"grad_norm": 13.0625,
"learning_rate": 4.958291457286433e-05,
"loss": 7.5807,
"step": 2660
},
{
"epoch": 0.01335,
"grad_norm": 1.65625,
"learning_rate": 4.958040201005025e-05,
"loss": 7.5653,
"step": 2670
},
{
"epoch": 0.0134,
"grad_norm": 33.5,
"learning_rate": 4.957788944723618e-05,
"loss": 7.5593,
"step": 2680
},
{
"epoch": 0.01345,
"grad_norm": 2.546875,
"learning_rate": 4.9575376884422114e-05,
"loss": 7.5758,
"step": 2690
},
{
"epoch": 0.0135,
"grad_norm": 1.8046875,
"learning_rate": 4.9572864321608045e-05,
"loss": 7.5581,
"step": 2700
},
{
"epoch": 0.01355,
"grad_norm": 20.625,
"learning_rate": 4.957035175879397e-05,
"loss": 7.5494,
"step": 2710
},
{
"epoch": 0.0136,
"grad_norm": 24.25,
"learning_rate": 4.956783919597991e-05,
"loss": 7.5785,
"step": 2720
},
{
"epoch": 0.01365,
"grad_norm": 12.0,
"learning_rate": 4.956532663316583e-05,
"loss": 7.5553,
"step": 2730
},
{
"epoch": 0.0137,
"grad_norm": 1.5078125,
"learning_rate": 4.956281407035176e-05,
"loss": 7.5827,
"step": 2740
},
{
"epoch": 0.01375,
"grad_norm": 32.0,
"learning_rate": 4.956030150753769e-05,
"loss": 7.5814,
"step": 2750
},
{
"epoch": 0.0138,
"grad_norm": 110.5,
"learning_rate": 4.9557788944723624e-05,
"loss": 7.5819,
"step": 2760
},
{
"epoch": 0.01385,
"grad_norm": 7.625,
"learning_rate": 4.955527638190955e-05,
"loss": 7.5767,
"step": 2770
},
{
"epoch": 0.0139,
"grad_norm": 54.25,
"learning_rate": 4.955276381909548e-05,
"loss": 7.5621,
"step": 2780
},
{
"epoch": 0.01395,
"grad_norm": 7.8125,
"learning_rate": 4.955025125628141e-05,
"loss": 7.5634,
"step": 2790
},
{
"epoch": 0.014,
"grad_norm": 1.96875,
"learning_rate": 4.954773869346734e-05,
"loss": 7.5808,
"step": 2800
},
{
"epoch": 0.01405,
"grad_norm": 8.9375,
"learning_rate": 4.9545226130653266e-05,
"loss": 7.6055,
"step": 2810
},
{
"epoch": 0.0141,
"grad_norm": 1.2890625,
"learning_rate": 4.95427135678392e-05,
"loss": 7.5899,
"step": 2820
},
{
"epoch": 0.01415,
"grad_norm": 4.875,
"learning_rate": 4.954020100502513e-05,
"loss": 7.5589,
"step": 2830
},
{
"epoch": 0.0142,
"grad_norm": 16.375,
"learning_rate": 4.953768844221105e-05,
"loss": 7.6175,
"step": 2840
},
{
"epoch": 0.01425,
"grad_norm": 1.7734375,
"learning_rate": 4.953517587939699e-05,
"loss": 7.577,
"step": 2850
},
{
"epoch": 0.0143,
"grad_norm": 2.515625,
"learning_rate": 4.9532663316582914e-05,
"loss": 7.5966,
"step": 2860
},
{
"epoch": 0.01435,
"grad_norm": 39.0,
"learning_rate": 4.9530150753768845e-05,
"loss": 7.5393,
"step": 2870
},
{
"epoch": 0.0144,
"grad_norm": 1.171875,
"learning_rate": 4.9527638190954776e-05,
"loss": 7.5757,
"step": 2880
},
{
"epoch": 0.01445,
"grad_norm": 1.8203125,
"learning_rate": 4.952512562814071e-05,
"loss": 7.5779,
"step": 2890
},
{
"epoch": 0.0145,
"grad_norm": 1.7578125,
"learning_rate": 4.952261306532663e-05,
"loss": 7.6028,
"step": 2900
},
{
"epoch": 0.01455,
"grad_norm": 15.8125,
"learning_rate": 4.952010050251257e-05,
"loss": 7.53,
"step": 2910
},
{
"epoch": 0.0146,
"grad_norm": 1.8203125,
"learning_rate": 4.9517587939698493e-05,
"loss": 7.5656,
"step": 2920
},
{
"epoch": 0.01465,
"grad_norm": 197.0,
"learning_rate": 4.9515075376884424e-05,
"loss": 7.559,
"step": 2930
},
{
"epoch": 0.0147,
"grad_norm": 2.53125,
"learning_rate": 4.9512562814070355e-05,
"loss": 7.5449,
"step": 2940
},
{
"epoch": 0.01475,
"grad_norm": 1.703125,
"learning_rate": 4.9510050251256287e-05,
"loss": 7.5569,
"step": 2950
},
{
"epoch": 0.0148,
"grad_norm": 1.5703125,
"learning_rate": 4.950753768844221e-05,
"loss": 7.5683,
"step": 2960
},
{
"epoch": 0.01485,
"grad_norm": 65.0,
"learning_rate": 4.950502512562814e-05,
"loss": 7.566,
"step": 2970
},
{
"epoch": 0.0149,
"grad_norm": 1.5,
"learning_rate": 4.950251256281407e-05,
"loss": 7.5829,
"step": 2980
},
{
"epoch": 0.01495,
"grad_norm": 44.25,
"learning_rate": 4.9500000000000004e-05,
"loss": 7.5524,
"step": 2990
},
{
"epoch": 0.015,
"grad_norm": 73.5,
"learning_rate": 4.949748743718593e-05,
"loss": 7.5774,
"step": 3000
},
{
"epoch": 0.01505,
"grad_norm": 76.0,
"learning_rate": 4.9494974874371866e-05,
"loss": 7.5661,
"step": 3010
},
{
"epoch": 0.0151,
"grad_norm": 1.9375,
"learning_rate": 4.949246231155779e-05,
"loss": 7.5823,
"step": 3020
},
{
"epoch": 0.01515,
"grad_norm": 6.8125,
"learning_rate": 4.948994974874372e-05,
"loss": 7.5367,
"step": 3030
},
{
"epoch": 0.0152,
"grad_norm": 13.5625,
"learning_rate": 4.948743718592965e-05,
"loss": 7.5738,
"step": 3040
},
{
"epoch": 0.01525,
"grad_norm": 1.5,
"learning_rate": 4.948492462311558e-05,
"loss": 7.5641,
"step": 3050
},
{
"epoch": 0.0153,
"grad_norm": 6.28125,
"learning_rate": 4.948241206030151e-05,
"loss": 7.5391,
"step": 3060
},
{
"epoch": 0.01535,
"grad_norm": 2.8125,
"learning_rate": 4.9479899497487445e-05,
"loss": 7.5468,
"step": 3070
},
{
"epoch": 0.0154,
"grad_norm": 1.6015625,
"learning_rate": 4.947738693467337e-05,
"loss": 7.5947,
"step": 3080
},
{
"epoch": 0.01545,
"grad_norm": 82.5,
"learning_rate": 4.94748743718593e-05,
"loss": 7.5516,
"step": 3090
},
{
"epoch": 0.0155,
"grad_norm": 2.546875,
"learning_rate": 4.947236180904523e-05,
"loss": 7.5146,
"step": 3100
},
{
"epoch": 0.01555,
"grad_norm": 1.9140625,
"learning_rate": 4.946984924623116e-05,
"loss": 7.5449,
"step": 3110
},
{
"epoch": 0.0156,
"grad_norm": 2.5625,
"learning_rate": 4.946733668341709e-05,
"loss": 7.6047,
"step": 3120
},
{
"epoch": 0.01565,
"grad_norm": 1.84375,
"learning_rate": 4.946482412060302e-05,
"loss": 7.5818,
"step": 3130
},
{
"epoch": 0.0157,
"grad_norm": 10.9375,
"learning_rate": 4.946231155778895e-05,
"loss": 7.5757,
"step": 3140
},
{
"epoch": 0.01575,
"grad_norm": 15.375,
"learning_rate": 4.945979899497487e-05,
"loss": 7.5958,
"step": 3150
},
{
"epoch": 0.0158,
"grad_norm": 1.796875,
"learning_rate": 4.9457286432160804e-05,
"loss": 7.537,
"step": 3160
},
{
"epoch": 0.01585,
"grad_norm": 1.5859375,
"learning_rate": 4.9454773869346735e-05,
"loss": 7.5929,
"step": 3170
},
{
"epoch": 0.0159,
"grad_norm": 15.8125,
"learning_rate": 4.9452261306532666e-05,
"loss": 7.5747,
"step": 3180
},
{
"epoch": 0.01595,
"grad_norm": 17.375,
"learning_rate": 4.944974874371859e-05,
"loss": 7.5971,
"step": 3190
},
{
"epoch": 0.016,
"grad_norm": 10.0,
"learning_rate": 4.944723618090453e-05,
"loss": 7.5508,
"step": 3200
},
{
"epoch": 0.01605,
"grad_norm": 41.5,
"learning_rate": 4.944472361809045e-05,
"loss": 7.5781,
"step": 3210
},
{
"epoch": 0.0161,
"grad_norm": 39.25,
"learning_rate": 4.944221105527638e-05,
"loss": 7.5582,
"step": 3220
},
{
"epoch": 0.01615,
"grad_norm": 19.75,
"learning_rate": 4.9439698492462314e-05,
"loss": 7.5801,
"step": 3230
},
{
"epoch": 0.0162,
"grad_norm": 1.328125,
"learning_rate": 4.9437185929648245e-05,
"loss": 7.6125,
"step": 3240
},
{
"epoch": 0.01625,
"grad_norm": 1.5234375,
"learning_rate": 4.943467336683417e-05,
"loss": 7.5894,
"step": 3250
},
{
"epoch": 0.0163,
"grad_norm": 7.34375,
"learning_rate": 4.943216080402011e-05,
"loss": 7.5851,
"step": 3260
},
{
"epoch": 0.01635,
"grad_norm": 7.59375,
"learning_rate": 4.942964824120603e-05,
"loss": 7.5881,
"step": 3270
},
{
"epoch": 0.0164,
"grad_norm": 4.34375,
"learning_rate": 4.942713567839196e-05,
"loss": 7.6143,
"step": 3280
},
{
"epoch": 0.01645,
"grad_norm": 40.25,
"learning_rate": 4.942462311557789e-05,
"loss": 7.5958,
"step": 3290
},
{
"epoch": 0.0165,
"grad_norm": 5.96875,
"learning_rate": 4.9422110552763825e-05,
"loss": 7.5495,
"step": 3300
},
{
"epoch": 0.01655,
"grad_norm": 1.671875,
"learning_rate": 4.941959798994975e-05,
"loss": 7.5791,
"step": 3310
},
{
"epoch": 0.0166,
"grad_norm": 10.25,
"learning_rate": 4.941708542713568e-05,
"loss": 7.579,
"step": 3320
},
{
"epoch": 0.01665,
"grad_norm": 2.75,
"learning_rate": 4.941457286432161e-05,
"loss": 7.5623,
"step": 3330
},
{
"epoch": 0.0167,
"grad_norm": 1.5,
"learning_rate": 4.941206030150754e-05,
"loss": 7.5906,
"step": 3340
},
{
"epoch": 0.01675,
"grad_norm": 38.75,
"learning_rate": 4.9409547738693466e-05,
"loss": 7.5825,
"step": 3350
},
{
"epoch": 0.0168,
"grad_norm": 5.28125,
"learning_rate": 4.9407035175879404e-05,
"loss": 7.5844,
"step": 3360
},
{
"epoch": 0.01685,
"grad_norm": 1.5703125,
"learning_rate": 4.940452261306533e-05,
"loss": 7.5995,
"step": 3370
},
{
"epoch": 0.0169,
"grad_norm": 5.84375,
"learning_rate": 4.940201005025126e-05,
"loss": 7.5426,
"step": 3380
},
{
"epoch": 0.01695,
"grad_norm": 1.6796875,
"learning_rate": 4.939949748743719e-05,
"loss": 7.5876,
"step": 3390
},
{
"epoch": 0.017,
"grad_norm": 1.25,
"learning_rate": 4.939698492462312e-05,
"loss": 7.564,
"step": 3400
},
{
"epoch": 0.01705,
"grad_norm": 22.5,
"learning_rate": 4.9394472361809046e-05,
"loss": 7.567,
"step": 3410
},
{
"epoch": 0.0171,
"grad_norm": 11.9375,
"learning_rate": 4.9391959798994977e-05,
"loss": 7.5689,
"step": 3420
},
{
"epoch": 0.01715,
"grad_norm": 2.234375,
"learning_rate": 4.938944723618091e-05,
"loss": 7.54,
"step": 3430
},
{
"epoch": 0.0172,
"grad_norm": 24.375,
"learning_rate": 4.938693467336684e-05,
"loss": 7.5887,
"step": 3440
},
{
"epoch": 0.01725,
"grad_norm": 57.25,
"learning_rate": 4.938442211055276e-05,
"loss": 7.5857,
"step": 3450
},
{
"epoch": 0.0173,
"grad_norm": 44.0,
"learning_rate": 4.93819095477387e-05,
"loss": 7.5939,
"step": 3460
},
{
"epoch": 0.01735,
"grad_norm": 12.4375,
"learning_rate": 4.9379396984924625e-05,
"loss": 7.5695,
"step": 3470
},
{
"epoch": 0.0174,
"grad_norm": 5.71875,
"learning_rate": 4.9376884422110556e-05,
"loss": 7.5632,
"step": 3480
},
{
"epoch": 0.01745,
"grad_norm": 2.1875,
"learning_rate": 4.937437185929649e-05,
"loss": 7.5836,
"step": 3490
},
{
"epoch": 0.0175,
"grad_norm": 2.421875,
"learning_rate": 4.937185929648241e-05,
"loss": 7.5457,
"step": 3500
},
{
"epoch": 0.01755,
"grad_norm": 6.15625,
"learning_rate": 4.936934673366834e-05,
"loss": 7.5696,
"step": 3510
},
{
"epoch": 0.0176,
"grad_norm": 3.671875,
"learning_rate": 4.936683417085427e-05,
"loss": 7.5662,
"step": 3520
},
{
"epoch": 0.01765,
"grad_norm": 1.84375,
"learning_rate": 4.9364321608040204e-05,
"loss": 7.5914,
"step": 3530
},
{
"epoch": 0.0177,
"grad_norm": 4.3125,
"learning_rate": 4.936180904522613e-05,
"loss": 7.5754,
"step": 3540
},
{
"epoch": 0.01775,
"grad_norm": 8.0,
"learning_rate": 4.9359296482412066e-05,
"loss": 7.614,
"step": 3550
},
{
"epoch": 0.0178,
"grad_norm": 1.890625,
"learning_rate": 4.935678391959799e-05,
"loss": 7.5519,
"step": 3560
},
{
"epoch": 0.01785,
"grad_norm": 1.625,
"learning_rate": 4.935427135678392e-05,
"loss": 7.588,
"step": 3570
},
{
"epoch": 0.0179,
"grad_norm": 45.5,
"learning_rate": 4.9351758793969846e-05,
"loss": 7.5629,
"step": 3580
},
{
"epoch": 0.01795,
"grad_norm": 1.8046875,
"learning_rate": 4.9349246231155784e-05,
"loss": 7.5556,
"step": 3590
},
{
"epoch": 0.018,
"grad_norm": 1.203125,
"learning_rate": 4.934673366834171e-05,
"loss": 7.5878,
"step": 3600
},
{
"epoch": 0.01805,
"grad_norm": 7.34375,
"learning_rate": 4.934422110552764e-05,
"loss": 7.5983,
"step": 3610
},
{
"epoch": 0.0181,
"grad_norm": 4.28125,
"learning_rate": 4.934170854271357e-05,
"loss": 7.6071,
"step": 3620
},
{
"epoch": 0.01815,
"grad_norm": 1.703125,
"learning_rate": 4.93391959798995e-05,
"loss": 7.5621,
"step": 3630
},
{
"epoch": 0.0182,
"grad_norm": 1.5,
"learning_rate": 4.9336683417085425e-05,
"loss": 7.582,
"step": 3640
},
{
"epoch": 0.01825,
"grad_norm": 2.078125,
"learning_rate": 4.933417085427136e-05,
"loss": 7.5727,
"step": 3650
},
{
"epoch": 0.0183,
"grad_norm": 67.5,
"learning_rate": 4.933165829145729e-05,
"loss": 7.572,
"step": 3660
},
{
"epoch": 0.01835,
"grad_norm": 103.5,
"learning_rate": 4.932914572864322e-05,
"loss": 7.5624,
"step": 3670
},
{
"epoch": 0.0184,
"grad_norm": 1.5703125,
"learning_rate": 4.932663316582915e-05,
"loss": 7.5899,
"step": 3680
},
{
"epoch": 0.01845,
"grad_norm": 1.6484375,
"learning_rate": 4.932412060301508e-05,
"loss": 7.5524,
"step": 3690
},
{
"epoch": 0.0185,
"grad_norm": 48.5,
"learning_rate": 4.9321608040201004e-05,
"loss": 7.6027,
"step": 3700
},
{
"epoch": 0.01855,
"grad_norm": 2.09375,
"learning_rate": 4.931909547738694e-05,
"loss": 7.5536,
"step": 3710
},
{
"epoch": 0.0186,
"grad_norm": 1.84375,
"learning_rate": 4.9316582914572866e-05,
"loss": 7.5719,
"step": 3720
},
{
"epoch": 0.01865,
"grad_norm": 1.9765625,
"learning_rate": 4.93140703517588e-05,
"loss": 7.5592,
"step": 3730
},
{
"epoch": 0.0187,
"grad_norm": 3.0625,
"learning_rate": 4.931155778894472e-05,
"loss": 7.6041,
"step": 3740
},
{
"epoch": 0.01875,
"grad_norm": 1.515625,
"learning_rate": 4.930904522613066e-05,
"loss": 7.574,
"step": 3750
},
{
"epoch": 0.0188,
"grad_norm": 122.5,
"learning_rate": 4.9306532663316584e-05,
"loss": 7.5402,
"step": 3760
},
{
"epoch": 0.01885,
"grad_norm": 1.6171875,
"learning_rate": 4.9304020100502515e-05,
"loss": 7.5854,
"step": 3770
},
{
"epoch": 0.0189,
"grad_norm": 22.125,
"learning_rate": 4.9301507537688446e-05,
"loss": 7.5882,
"step": 3780
},
{
"epoch": 0.01895,
"grad_norm": 1.28125,
"learning_rate": 4.929899497487438e-05,
"loss": 7.5971,
"step": 3790
},
{
"epoch": 0.019,
"grad_norm": 2.03125,
"learning_rate": 4.92964824120603e-05,
"loss": 7.5531,
"step": 3800
},
{
"epoch": 0.01905,
"grad_norm": 1.3125,
"learning_rate": 4.929396984924623e-05,
"loss": 7.5697,
"step": 3810
},
{
"epoch": 0.0191,
"grad_norm": 3.703125,
"learning_rate": 4.929145728643216e-05,
"loss": 7.5603,
"step": 3820
},
{
"epoch": 0.01915,
"grad_norm": 4.3125,
"learning_rate": 4.9288944723618094e-05,
"loss": 7.5705,
"step": 3830
},
{
"epoch": 0.0192,
"grad_norm": 22.125,
"learning_rate": 4.9286432160804025e-05,
"loss": 7.585,
"step": 3840
},
{
"epoch": 0.01925,
"grad_norm": 13.4375,
"learning_rate": 4.928391959798995e-05,
"loss": 7.5508,
"step": 3850
},
{
"epoch": 0.0193,
"grad_norm": 19.125,
"learning_rate": 4.928140703517588e-05,
"loss": 7.6242,
"step": 3860
},
{
"epoch": 0.01935,
"grad_norm": 30.375,
"learning_rate": 4.927889447236181e-05,
"loss": 7.5297,
"step": 3870
},
{
"epoch": 0.0194,
"grad_norm": 26.625,
"learning_rate": 4.927638190954774e-05,
"loss": 7.6138,
"step": 3880
},
{
"epoch": 0.01945,
"grad_norm": 15.9375,
"learning_rate": 4.9273869346733667e-05,
"loss": 7.5671,
"step": 3890
},
{
"epoch": 0.0195,
"grad_norm": 17.75,
"learning_rate": 4.92713567839196e-05,
"loss": 7.6151,
"step": 3900
},
{
"epoch": 0.01955,
"grad_norm": 1.234375,
"learning_rate": 4.926884422110553e-05,
"loss": 7.6086,
"step": 3910
},
{
"epoch": 0.0196,
"grad_norm": 1.359375,
"learning_rate": 4.926633165829146e-05,
"loss": 7.5837,
"step": 3920
},
{
"epoch": 0.01965,
"grad_norm": 61.0,
"learning_rate": 4.9263819095477384e-05,
"loss": 7.5868,
"step": 3930
},
{
"epoch": 0.0197,
"grad_norm": 1.984375,
"learning_rate": 4.926130653266332e-05,
"loss": 7.5785,
"step": 3940
},
{
"epoch": 0.01975,
"grad_norm": 37.5,
"learning_rate": 4.9258793969849246e-05,
"loss": 7.615,
"step": 3950
},
{
"epoch": 0.0198,
"grad_norm": 2.03125,
"learning_rate": 4.925628140703518e-05,
"loss": 7.5894,
"step": 3960
},
{
"epoch": 0.01985,
"grad_norm": 1.8203125,
"learning_rate": 4.925376884422111e-05,
"loss": 7.6036,
"step": 3970
},
{
"epoch": 0.0199,
"grad_norm": 1.453125,
"learning_rate": 4.925125628140704e-05,
"loss": 7.5874,
"step": 3980
},
{
"epoch": 0.01995,
"grad_norm": 57.0,
"learning_rate": 4.924874371859296e-05,
"loss": 7.5653,
"step": 3990
},
{
"epoch": 0.02,
"grad_norm": 16.625,
"learning_rate": 4.92462311557789e-05,
"loss": 7.5676,
"step": 4000
},
{
"epoch": 0.02,
"eval_loss": 7.588551998138428,
"eval_runtime": 89.0965,
"eval_samples_per_second": 28.059,
"eval_steps_per_second": 0.449,
"step": 4000
}
],
"logging_steps": 10,
"max_steps": 200000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.586817140785152e+17,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}