{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.789047219893825,
  "eval_steps": 500,
  "global_step": 1600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011176306230790724,
      "grad_norm": 2.6477396488189697,
      "learning_rate": 0.0,
      "loss": 0.9173,
      "num_tokens": 2283153.0,
      "step": 1
    },
    {
      "epoch": 0.005588153115395362,
      "grad_norm": 2.6618285179138184,
      "learning_rate": 4.46927374301676e-07,
      "loss": 0.9183,
      "num_tokens": 11372970.0,
      "step": 5
    },
    {
      "epoch": 0.011176306230790724,
      "grad_norm": 2.51755690574646,
      "learning_rate": 1.005586592178771e-06,
      "loss": 0.9114,
      "num_tokens": 22783006.0,
      "step": 10
    },
    {
      "epoch": 0.016764459346186086,
      "grad_norm": 2.163336992263794,
      "learning_rate": 1.564245810055866e-06,
      "loss": 0.8871,
      "num_tokens": 34178559.0,
      "step": 15
    },
    {
      "epoch": 0.022352612461581448,
      "grad_norm": 1.4962947368621826,
      "learning_rate": 2.1229050279329612e-06,
      "loss": 0.8116,
      "num_tokens": 45574554.0,
      "step": 20
    },
    {
      "epoch": 0.02794076557697681,
      "grad_norm": 1.0881785154342651,
      "learning_rate": 2.6815642458100562e-06,
      "loss": 0.7168,
      "num_tokens": 57052998.0,
      "step": 25
    },
    {
      "epoch": 0.03352891869237217,
      "grad_norm": 1.133913516998291,
      "learning_rate": 3.240223463687151e-06,
      "loss": 0.5776,
      "num_tokens": 68474831.0,
      "step": 30
    },
    {
      "epoch": 0.03911707180776753,
      "grad_norm": 0.5789642930030823,
      "learning_rate": 3.798882681564246e-06,
      "loss": 0.4818,
      "num_tokens": 79910887.0,
      "step": 35
    },
    {
      "epoch": 0.044705224923162895,
      "grad_norm": 0.31034183502197266,
      "learning_rate": 4.357541899441341e-06,
      "loss": 0.415,
      "num_tokens": 91319517.0,
      "step": 40
    },
    {
      "epoch": 0.050293378038558254,
      "grad_norm": 0.2817094027996063,
      "learning_rate": 4.916201117318436e-06,
      "loss": 0.4016,
      "num_tokens": 102702395.0,
      "step": 45
    },
    {
      "epoch": 0.05588153115395362,
      "grad_norm": 0.2526246905326843,
      "learning_rate": 5.474860335195531e-06,
      "loss": 0.3861,
      "num_tokens": 114127803.0,
      "step": 50
    },
    {
      "epoch": 0.06146968426934898,
      "grad_norm": 0.2339339554309845,
      "learning_rate": 6.033519553072626e-06,
      "loss": 0.3764,
      "num_tokens": 125524248.0,
      "step": 55
    },
    {
      "epoch": 0.06705783738474434,
      "grad_norm": 0.23666101694107056,
      "learning_rate": 6.592178770949721e-06,
      "loss": 0.3671,
      "num_tokens": 136911011.0,
      "step": 60
    },
    {
      "epoch": 0.0726459905001397,
      "grad_norm": 0.2580510973930359,
      "learning_rate": 7.150837988826816e-06,
      "loss": 0.3604,
      "num_tokens": 148350926.0,
      "step": 65
    },
    {
      "epoch": 0.07823414361553506,
      "grad_norm": 0.31147921085357666,
      "learning_rate": 7.709497206703911e-06,
      "loss": 0.3579,
      "num_tokens": 159702648.0,
      "step": 70
    },
    {
      "epoch": 0.08382229673093043,
      "grad_norm": 0.19841235876083374,
      "learning_rate": 8.268156424581007e-06,
      "loss": 0.348,
      "num_tokens": 171139841.0,
      "step": 75
    },
    {
      "epoch": 0.08941044984632579,
      "grad_norm": 0.0895436629652977,
      "learning_rate": 8.826815642458101e-06,
      "loss": 0.3423,
      "num_tokens": 182567409.0,
      "step": 80
    },
    {
      "epoch": 0.09499860296172115,
      "grad_norm": 0.09871780127286911,
      "learning_rate": 9.385474860335197e-06,
      "loss": 0.3421,
      "num_tokens": 194004756.0,
      "step": 85
    },
    {
      "epoch": 0.10058675607711651,
      "grad_norm": 0.06650134176015854,
      "learning_rate": 9.944134078212291e-06,
      "loss": 0.3384,
      "num_tokens": 205442473.0,
      "step": 90
    },
    {
      "epoch": 0.10617490919251188,
      "grad_norm": 0.0545571967959404,
      "learning_rate": 1.0502793296089386e-05,
      "loss": 0.3352,
      "num_tokens": 216872981.0,
      "step": 95
    },
    {
      "epoch": 0.11176306230790724,
      "grad_norm": 0.054781682789325714,
      "learning_rate": 1.1061452513966481e-05,
      "loss": 0.3316,
      "num_tokens": 228325464.0,
      "step": 100
    },
    {
      "epoch": 0.1173512154233026,
      "grad_norm": 0.05507330223917961,
      "learning_rate": 1.1620111731843577e-05,
      "loss": 0.3304,
      "num_tokens": 239746434.0,
      "step": 105
    },
    {
      "epoch": 0.12293936853869795,
      "grad_norm": 0.04985460266470909,
      "learning_rate": 1.2178770949720671e-05,
      "loss": 0.3286,
      "num_tokens": 251145034.0,
      "step": 110
    },
    {
      "epoch": 0.1285275216540933,
      "grad_norm": 0.05047163367271423,
      "learning_rate": 1.2737430167597766e-05,
      "loss": 0.3248,
      "num_tokens": 262525841.0,
      "step": 115
    },
    {
      "epoch": 0.13411567476948869,
      "grad_norm": 0.0513993501663208,
      "learning_rate": 1.3296089385474861e-05,
      "loss": 0.3259,
      "num_tokens": 273895120.0,
      "step": 120
    },
    {
      "epoch": 0.13970382788488406,
      "grad_norm": 0.05008988827466965,
      "learning_rate": 1.3854748603351957e-05,
      "loss": 0.3216,
      "num_tokens": 285306751.0,
      "step": 125
    },
    {
      "epoch": 0.1452919810002794,
      "grad_norm": 0.05217043682932854,
      "learning_rate": 1.4413407821229052e-05,
      "loss": 0.3202,
      "num_tokens": 296700765.0,
      "step": 130
    },
    {
      "epoch": 0.15088013411567477,
      "grad_norm": 0.05048668757081032,
      "learning_rate": 1.4972067039106146e-05,
      "loss": 0.3193,
      "num_tokens": 308137335.0,
      "step": 135
    },
    {
      "epoch": 0.15646828723107012,
      "grad_norm": 0.0592043362557888,
      "learning_rate": 1.553072625698324e-05,
      "loss": 0.3172,
      "num_tokens": 319571146.0,
      "step": 140
    },
    {
      "epoch": 0.1620564403464655,
      "grad_norm": 0.06134902313351631,
      "learning_rate": 1.6089385474860336e-05,
      "loss": 0.3135,
      "num_tokens": 330973413.0,
      "step": 145
    },
    {
      "epoch": 0.16764459346186086,
      "grad_norm": 0.05340028181672096,
      "learning_rate": 1.664804469273743e-05,
      "loss": 0.3124,
      "num_tokens": 342371116.0,
      "step": 150
    },
    {
      "epoch": 0.1732327465772562,
      "grad_norm": 0.05752944201231003,
      "learning_rate": 1.7206703910614527e-05,
      "loss": 0.3119,
      "num_tokens": 353803596.0,
      "step": 155
    },
    {
      "epoch": 0.17882089969265158,
      "grad_norm": 0.06477999687194824,
      "learning_rate": 1.776536312849162e-05,
      "loss": 0.3096,
      "num_tokens": 365240245.0,
      "step": 160
    },
    {
      "epoch": 0.18440905280804695,
      "grad_norm": 0.0565628707408905,
      "learning_rate": 1.8324022346368716e-05,
      "loss": 0.3111,
      "num_tokens": 376640271.0,
      "step": 165
    },
    {
      "epoch": 0.1899972059234423,
      "grad_norm": 0.06367555260658264,
      "learning_rate": 1.888268156424581e-05,
      "loss": 0.3066,
      "num_tokens": 388026301.0,
      "step": 170
    },
    {
      "epoch": 0.19558535903883767,
      "grad_norm": 0.06112772226333618,
      "learning_rate": 1.9441340782122907e-05,
      "loss": 0.3087,
      "num_tokens": 399425545.0,
      "step": 175
    },
    {
      "epoch": 0.20117351215423301,
      "grad_norm": 0.05626397952437401,
      "learning_rate": 2e-05,
      "loss": 0.3043,
      "num_tokens": 410766539.0,
      "step": 180
    },
    {
      "epoch": 0.2067616652696284,
      "grad_norm": 0.06474322825670242,
      "learning_rate": 1.9937849596022375e-05,
      "loss": 0.3048,
      "num_tokens": 422199158.0,
      "step": 185
    },
    {
      "epoch": 0.21234981838502376,
      "grad_norm": 0.05852804705500603,
      "learning_rate": 1.9875699192044752e-05,
      "loss": 0.3037,
      "num_tokens": 433637045.0,
      "step": 190
    },
    {
      "epoch": 0.2179379715004191,
      "grad_norm": 0.07262508571147919,
      "learning_rate": 1.9813548788067125e-05,
      "loss": 0.3028,
      "num_tokens": 445060326.0,
      "step": 195
    },
    {
      "epoch": 0.22352612461581448,
      "grad_norm": 0.061024755239486694,
      "learning_rate": 1.97513983840895e-05,
      "loss": 0.3011,
      "num_tokens": 456564432.0,
      "step": 200
    },
    {
      "epoch": 0.22911427773120985,
      "grad_norm": 0.05684756860136986,
      "learning_rate": 1.9689247980111872e-05,
      "loss": 0.3005,
      "num_tokens": 467954846.0,
      "step": 205
    },
    {
      "epoch": 0.2347024308466052,
      "grad_norm": 0.05736987292766571,
      "learning_rate": 1.9627097576134246e-05,
      "loss": 0.2993,
      "num_tokens": 479322568.0,
      "step": 210
    },
    {
      "epoch": 0.24029058396200056,
      "grad_norm": 0.056117478758096695,
      "learning_rate": 1.9564947172156622e-05,
      "loss": 0.3015,
      "num_tokens": 490689992.0,
      "step": 215
    },
    {
      "epoch": 0.2458787370773959,
      "grad_norm": 0.053805723786354065,
      "learning_rate": 1.9502796768178996e-05,
      "loss": 0.2969,
      "num_tokens": 502112760.0,
      "step": 220
    },
    {
      "epoch": 0.2514668901927913,
      "grad_norm": 0.054624415934085846,
      "learning_rate": 1.944064636420137e-05,
      "loss": 0.2962,
      "num_tokens": 513492046.0,
      "step": 225
    },
    {
      "epoch": 0.2570550433081866,
      "grad_norm": 0.05298687517642975,
      "learning_rate": 1.9378495960223743e-05,
      "loss": 0.2985,
      "num_tokens": 524919249.0,
      "step": 230
    },
    {
      "epoch": 0.262643196423582,
      "grad_norm": 0.059946753084659576,
      "learning_rate": 1.9316345556246116e-05,
      "loss": 0.2947,
      "num_tokens": 536324070.0,
      "step": 235
    },
    {
      "epoch": 0.26823134953897737,
      "grad_norm": 0.05546911805868149,
      "learning_rate": 1.925419515226849e-05,
      "loss": 0.2947,
      "num_tokens": 547757412.0,
      "step": 240
    },
    {
      "epoch": 0.2738195026543727,
      "grad_norm": 0.05153710022568703,
      "learning_rate": 1.9192044748290866e-05,
      "loss": 0.2945,
      "num_tokens": 559148039.0,
      "step": 245
    },
    {
      "epoch": 0.2794076557697681,
      "grad_norm": 0.05556200444698334,
      "learning_rate": 1.912989434431324e-05,
      "loss": 0.2929,
      "num_tokens": 570614372.0,
      "step": 250
    },
    {
      "epoch": 0.28499580888516346,
      "grad_norm": 0.05382775515317917,
      "learning_rate": 1.9067743940335613e-05,
      "loss": 0.2943,
      "num_tokens": 581988955.0,
      "step": 255
    },
    {
      "epoch": 0.2905839620005588,
      "grad_norm": 0.058685433119535446,
      "learning_rate": 1.9005593536357987e-05,
      "loss": 0.2936,
      "num_tokens": 593431393.0,
      "step": 260
    },
    {
      "epoch": 0.2961721151159542,
      "grad_norm": 0.05739787593483925,
      "learning_rate": 1.894344313238036e-05,
      "loss": 0.292,
      "num_tokens": 604833789.0,
      "step": 265
    },
    {
      "epoch": 0.30176026823134955,
      "grad_norm": 0.06372752040624619,
      "learning_rate": 1.8881292728402737e-05,
      "loss": 0.2904,
      "num_tokens": 616243936.0,
      "step": 270
    },
    {
      "epoch": 0.3073484213467449,
      "grad_norm": 0.07772672921419144,
      "learning_rate": 1.881914232442511e-05,
      "loss": 0.2897,
      "num_tokens": 627657251.0,
      "step": 275
    },
    {
      "epoch": 0.31293657446214024,
      "grad_norm": 0.05482286587357521,
      "learning_rate": 1.8756991920447484e-05,
      "loss": 0.2902,
      "num_tokens": 639102143.0,
      "step": 280
    },
    {
      "epoch": 0.31852472757753564,
      "grad_norm": 0.0831618458032608,
      "learning_rate": 1.869484151646986e-05,
      "loss": 0.2895,
      "num_tokens": 650526469.0,
      "step": 285
    },
    {
      "epoch": 0.324112880692931,
      "grad_norm": 0.08661651611328125,
      "learning_rate": 1.863269111249223e-05,
      "loss": 0.2874,
      "num_tokens": 661978672.0,
      "step": 290
    },
    {
      "epoch": 0.3297010338083263,
      "grad_norm": 0.06093963235616684,
      "learning_rate": 1.8570540708514607e-05,
      "loss": 0.2858,
      "num_tokens": 673371819.0,
      "step": 295
    },
    {
      "epoch": 0.3352891869237217,
      "grad_norm": 0.0825875923037529,
      "learning_rate": 1.850839030453698e-05,
      "loss": 0.2874,
      "num_tokens": 684804343.0,
      "step": 300
    },
    {
      "epoch": 0.34087734003911707,
      "grad_norm": 0.06878200173377991,
      "learning_rate": 1.8446239900559354e-05,
      "loss": 0.285,
      "num_tokens": 696163350.0,
      "step": 305
    },
    {
      "epoch": 0.3464654931545124,
      "grad_norm": 0.054976046085357666,
      "learning_rate": 1.838408949658173e-05,
      "loss": 0.2852,
      "num_tokens": 707554947.0,
      "step": 310
    },
    {
      "epoch": 0.3520536462699078,
      "grad_norm": 0.0718793049454689,
      "learning_rate": 1.8321939092604105e-05,
      "loss": 0.2854,
      "num_tokens": 718984412.0,
      "step": 315
    },
    {
      "epoch": 0.35764179938530316,
      "grad_norm": 0.06983356922864914,
      "learning_rate": 1.8259788688626478e-05,
      "loss": 0.2847,
      "num_tokens": 730470288.0,
      "step": 320
    },
    {
      "epoch": 0.3632299525006985,
      "grad_norm": 0.06837037205696106,
      "learning_rate": 1.819763828464885e-05,
      "loss": 0.2845,
      "num_tokens": 741858439.0,
      "step": 325
    },
    {
      "epoch": 0.3688181056160939,
      "grad_norm": 0.0682324767112732,
      "learning_rate": 1.8135487880671225e-05,
      "loss": 0.2812,
      "num_tokens": 753183420.0,
      "step": 330
    },
    {
      "epoch": 0.37440625873148925,
      "grad_norm": 0.06449826061725616,
      "learning_rate": 1.8073337476693598e-05,
      "loss": 0.2829,
      "num_tokens": 764629623.0,
      "step": 335
    },
    {
      "epoch": 0.3799944118468846,
      "grad_norm": 0.061453547328710556,
      "learning_rate": 1.8011187072715975e-05,
      "loss": 0.2817,
      "num_tokens": 776072180.0,
      "step": 340
    },
    {
      "epoch": 0.38558256496227994,
      "grad_norm": 0.06518165022134781,
      "learning_rate": 1.794903666873835e-05,
      "loss": 0.2827,
      "num_tokens": 787454913.0,
      "step": 345
    },
    {
      "epoch": 0.39117071807767534,
      "grad_norm": 0.07645566761493683,
      "learning_rate": 1.7886886264760722e-05,
      "loss": 0.2798,
      "num_tokens": 798880783.0,
      "step": 350
    },
    {
      "epoch": 0.3967588711930707,
      "grad_norm": 0.06994960457086563,
      "learning_rate": 1.7824735860783095e-05,
      "loss": 0.2822,
      "num_tokens": 810324356.0,
      "step": 355
    },
    {
      "epoch": 0.40234702430846603,
      "grad_norm": 0.06685755401849747,
      "learning_rate": 1.776258545680547e-05,
      "loss": 0.281,
      "num_tokens": 821684136.0,
      "step": 360
    },
    {
      "epoch": 0.40793517742386143,
      "grad_norm": 0.06723885238170624,
      "learning_rate": 1.7700435052827846e-05,
      "loss": 0.2791,
      "num_tokens": 833138031.0,
      "step": 365
    },
    {
      "epoch": 0.4135233305392568,
      "grad_norm": 0.06097578629851341,
      "learning_rate": 1.763828464885022e-05,
      "loss": 0.2791,
      "num_tokens": 844568556.0,
      "step": 370
    },
    {
      "epoch": 0.4191114836546521,
      "grad_norm": 0.06696998327970505,
      "learning_rate": 1.7576134244872592e-05,
      "loss": 0.2784,
      "num_tokens": 855958988.0,
      "step": 375
    },
    {
      "epoch": 0.4246996367700475,
      "grad_norm": 0.06988263130187988,
      "learning_rate": 1.751398384089497e-05,
      "loss": 0.2785,
      "num_tokens": 867386144.0,
      "step": 380
    },
    {
      "epoch": 0.43028778988544286,
      "grad_norm": 0.08155910670757294,
      "learning_rate": 1.745183343691734e-05,
      "loss": 0.2783,
      "num_tokens": 878793737.0,
      "step": 385
    },
    {
      "epoch": 0.4358759430008382,
      "grad_norm": 0.06370873749256134,
      "learning_rate": 1.7389683032939716e-05,
      "loss": 0.2773,
      "num_tokens": 890247643.0,
      "step": 390
    },
    {
      "epoch": 0.4414640961162336,
      "grad_norm": 0.05926324427127838,
      "learning_rate": 1.732753262896209e-05,
      "loss": 0.2785,
      "num_tokens": 901673170.0,
      "step": 395
    },
    {
      "epoch": 0.44705224923162895,
      "grad_norm": 0.06649507582187653,
      "learning_rate": 1.7265382224984463e-05,
      "loss": 0.275,
      "num_tokens": 913137686.0,
      "step": 400
    },
    {
      "epoch": 0.4526404023470243,
      "grad_norm": 0.06259816884994507,
      "learning_rate": 1.720323182100684e-05,
      "loss": 0.2767,
      "num_tokens": 924486634.0,
      "step": 405
    },
    {
      "epoch": 0.4582285554624197,
      "grad_norm": 0.06120780482888222,
      "learning_rate": 1.7141081417029213e-05,
      "loss": 0.2762,
      "num_tokens": 935847065.0,
      "step": 410
    },
    {
      "epoch": 0.46381670857781504,
      "grad_norm": 0.06271655857563019,
      "learning_rate": 1.7078931013051587e-05,
      "loss": 0.2766,
      "num_tokens": 947288653.0,
      "step": 415
    },
    {
      "epoch": 0.4694048616932104,
      "grad_norm": 0.06720273941755295,
      "learning_rate": 1.701678060907396e-05,
      "loss": 0.2774,
      "num_tokens": 958753391.0,
      "step": 420
    },
    {
      "epoch": 0.47499301480860573,
      "grad_norm": 0.07040219753980637,
      "learning_rate": 1.6954630205096334e-05,
      "loss": 0.2745,
      "num_tokens": 970211625.0,
      "step": 425
    },
    {
      "epoch": 0.48058116792400113,
      "grad_norm": 0.06442518532276154,
      "learning_rate": 1.6892479801118707e-05,
      "loss": 0.2745,
      "num_tokens": 981563186.0,
      "step": 430
    },
    {
      "epoch": 0.4861693210393965,
      "grad_norm": 0.07135903835296631,
      "learning_rate": 1.6830329397141084e-05,
      "loss": 0.2746,
      "num_tokens": 993028060.0,
      "step": 435
    },
    {
      "epoch": 0.4917574741547918,
      "grad_norm": 0.05886778235435486,
      "learning_rate": 1.6768178993163457e-05,
      "loss": 0.2742,
      "num_tokens": 1004435363.0,
      "step": 440
    },
    {
      "epoch": 0.4973456272701872,
      "grad_norm": 0.056988589465618134,
      "learning_rate": 1.670602858918583e-05,
      "loss": 0.2737,
      "num_tokens": 1015874037.0,
      "step": 445
    },
    {
      "epoch": 0.5029337803855826,
      "grad_norm": 0.06291932612657547,
      "learning_rate": 1.6643878185208204e-05,
      "loss": 0.2725,
      "num_tokens": 1027283401.0,
      "step": 450
    },
    {
      "epoch": 0.508521933500978,
      "grad_norm": 0.07145503163337708,
      "learning_rate": 1.6581727781230577e-05,
      "loss": 0.2725,
      "num_tokens": 1038629883.0,
      "step": 455
    },
    {
      "epoch": 0.5141100866163733,
      "grad_norm": 0.05856870859861374,
      "learning_rate": 1.6519577377252954e-05,
      "loss": 0.2726,
      "num_tokens": 1050019923.0,
      "step": 460
    },
    {
      "epoch": 0.5196982397317687,
      "grad_norm": 0.07050315290689468,
      "learning_rate": 1.6457426973275328e-05,
      "loss": 0.2706,
      "num_tokens": 1061416078.0,
      "step": 465
    },
    {
      "epoch": 0.525286392847164,
      "grad_norm": 0.06802777945995331,
      "learning_rate": 1.63952765692977e-05,
      "loss": 0.2702,
      "num_tokens": 1072788410.0,
      "step": 470
    },
    {
      "epoch": 0.5308745459625593,
      "grad_norm": 0.06870804727077484,
      "learning_rate": 1.6333126165320078e-05,
      "loss": 0.2719,
      "num_tokens": 1084176813.0,
      "step": 475
    },
    {
      "epoch": 0.5364626990779547,
      "grad_norm": 0.05907612666487694,
      "learning_rate": 1.6270975761342448e-05,
      "loss": 0.2715,
      "num_tokens": 1095631809.0,
      "step": 480
    },
    {
      "epoch": 0.5420508521933501,
      "grad_norm": 0.05871352180838585,
      "learning_rate": 1.6208825357364825e-05,
      "loss": 0.271,
      "num_tokens": 1107023659.0,
      "step": 485
    },
    {
      "epoch": 0.5476390053087454,
      "grad_norm": 0.07040645182132721,
      "learning_rate": 1.6146674953387198e-05,
      "loss": 0.2692,
      "num_tokens": 1118483440.0,
      "step": 490
    },
    {
      "epoch": 0.5532271584241408,
      "grad_norm": 0.09836603701114655,
      "learning_rate": 1.608452454940957e-05,
      "loss": 0.2696,
      "num_tokens": 1129990125.0,
      "step": 495
    },
    {
      "epoch": 0.5588153115395362,
      "grad_norm": 0.060847748070955276,
      "learning_rate": 1.602237414543195e-05,
      "loss": 0.2703,
      "num_tokens": 1141412681.0,
      "step": 500
    },
    {
      "epoch": 0.5644034646549315,
      "grad_norm": 0.0608036145567894,
      "learning_rate": 1.5960223741454322e-05,
      "loss": 0.269,
      "num_tokens": 1152853495.0,
      "step": 505
    },
    {
      "epoch": 0.5699916177703269,
      "grad_norm": 0.0629318356513977,
      "learning_rate": 1.5898073337476695e-05,
      "loss": 0.2696,
      "num_tokens": 1164243394.0,
      "step": 510
    },
    {
      "epoch": 0.5755797708857223,
      "grad_norm": 0.0749465599656105,
      "learning_rate": 1.583592293349907e-05,
      "loss": 0.2671,
      "num_tokens": 1175612357.0,
      "step": 515
    },
    {
      "epoch": 0.5811679240011176,
      "grad_norm": 0.09776467829942703,
      "learning_rate": 1.5773772529521442e-05,
      "loss": 0.2669,
      "num_tokens": 1187046869.0,
      "step": 520
    },
    {
      "epoch": 0.586756077116513,
      "grad_norm": 0.12097107619047165,
      "learning_rate": 1.5711622125543816e-05,
      "loss": 0.2681,
      "num_tokens": 1198478715.0,
      "step": 525
    },
    {
      "epoch": 0.5923442302319084,
      "grad_norm": 0.08982737362384796,
      "learning_rate": 1.5649471721566192e-05,
      "loss": 0.2676,
      "num_tokens": 1209894292.0,
      "step": 530
    },
    {
      "epoch": 0.5979323833473037,
      "grad_norm": 0.05943426489830017,
      "learning_rate": 1.5587321317588566e-05,
      "loss": 0.2681,
      "num_tokens": 1221288210.0,
      "step": 535
    },
    {
      "epoch": 0.6035205364626991,
      "grad_norm": 0.05909049138426781,
      "learning_rate": 1.552517091361094e-05,
      "loss": 0.2666,
      "num_tokens": 1232662337.0,
      "step": 540
    },
    {
      "epoch": 0.6091086895780944,
      "grad_norm": 0.06840803474187851,
      "learning_rate": 1.5463020509633313e-05,
      "loss": 0.2656,
      "num_tokens": 1244049474.0,
      "step": 545
    },
    {
      "epoch": 0.6146968426934898,
      "grad_norm": 0.0623379684984684,
      "learning_rate": 1.5400870105655686e-05,
      "loss": 0.2648,
      "num_tokens": 1255501882.0,
      "step": 550
    },
    {
      "epoch": 0.6202849958088852,
      "grad_norm": 0.05863022804260254,
      "learning_rate": 1.5338719701678063e-05,
      "loss": 0.2645,
      "num_tokens": 1266929882.0,
      "step": 555
    },
    {
      "epoch": 0.6258731489242805,
      "grad_norm": 0.0742977187037468,
      "learning_rate": 1.5276569297700436e-05,
      "loss": 0.2637,
      "num_tokens": 1278348389.0,
      "step": 560
    },
    {
      "epoch": 0.6314613020396759,
      "grad_norm": 0.06603442877531052,
      "learning_rate": 1.5214418893722812e-05,
      "loss": 0.2642,
      "num_tokens": 1289764706.0,
      "step": 565
    },
    {
      "epoch": 0.6370494551550713,
      "grad_norm": 0.06260518729686737,
      "learning_rate": 1.5152268489745185e-05,
      "loss": 0.2635,
      "num_tokens": 1301188033.0,
      "step": 570
    },
    {
      "epoch": 0.6426376082704666,
      "grad_norm": 0.06276093423366547,
      "learning_rate": 1.5090118085767558e-05,
      "loss": 0.2626,
      "num_tokens": 1312563993.0,
      "step": 575
    },
    {
      "epoch": 0.648225761385862,
      "grad_norm": 0.05987481400370598,
      "learning_rate": 1.5027967681789932e-05,
      "loss": 0.2642,
      "num_tokens": 1323998912.0,
      "step": 580
    },
    {
      "epoch": 0.6538139145012574,
      "grad_norm": 0.07826969027519226,
      "learning_rate": 1.4965817277812307e-05,
      "loss": 0.2637,
      "num_tokens": 1335437946.0,
      "step": 585
    },
    {
      "epoch": 0.6594020676166527,
      "grad_norm": 0.07437111437320709,
      "learning_rate": 1.4903666873834682e-05,
      "loss": 0.2625,
      "num_tokens": 1346881745.0,
      "step": 590
    },
    {
      "epoch": 0.664990220732048,
      "grad_norm": 0.09087026864290237,
      "learning_rate": 1.4841516469857056e-05,
      "loss": 0.2617,
      "num_tokens": 1358272528.0,
      "step": 595
    },
    {
      "epoch": 0.6705783738474435,
      "grad_norm": 0.09543482959270477,
      "learning_rate": 1.477936606587943e-05,
      "loss": 0.2628,
      "num_tokens": 1369658784.0,
      "step": 600
    },
    {
      "epoch": 0.6761665269628387,
      "grad_norm": 0.07354892790317535,
      "learning_rate": 1.4717215661901802e-05,
      "loss": 0.263,
      "num_tokens": 1381096596.0,
      "step": 605
    },
    {
      "epoch": 0.6817546800782341,
      "grad_norm": 0.08725232630968094,
      "learning_rate": 1.4655065257924177e-05,
      "loss": 0.2601,
      "num_tokens": 1392519601.0,
      "step": 610
    },
    {
      "epoch": 0.6873428331936295,
      "grad_norm": 0.06985431909561157,
      "learning_rate": 1.4592914853946551e-05,
      "loss": 0.2608,
      "num_tokens": 1403941561.0,
      "step": 615
    },
    {
      "epoch": 0.6929309863090248,
      "grad_norm": 0.0729910135269165,
      "learning_rate": 1.4530764449968926e-05,
      "loss": 0.2595,
      "num_tokens": 1415354409.0,
      "step": 620
    },
    {
      "epoch": 0.6985191394244202,
      "grad_norm": 0.07240404933691025,
      "learning_rate": 1.4468614045991301e-05,
      "loss": 0.2605,
      "num_tokens": 1426725558.0,
      "step": 625
    },
    {
      "epoch": 0.7041072925398156,
      "grad_norm": 0.06184804067015648,
      "learning_rate": 1.4406463642013675e-05,
      "loss": 0.2611,
      "num_tokens": 1438189513.0,
      "step": 630
    },
    {
      "epoch": 0.7096954456552109,
      "grad_norm": 0.08360790461301804,
      "learning_rate": 1.434431323803605e-05,
      "loss": 0.2607,
      "num_tokens": 1449608422.0,
      "step": 635
    },
    {
      "epoch": 0.7152835987706063,
      "grad_norm": 0.13933835923671722,
      "learning_rate": 1.4282162834058421e-05,
      "loss": 0.2585,
      "num_tokens": 1461023964.0,
      "step": 640
    },
    {
      "epoch": 0.7208717518860017,
      "grad_norm": 0.07683195173740387,
      "learning_rate": 1.4220012430080797e-05,
      "loss": 0.2591,
      "num_tokens": 1472442326.0,
      "step": 645
    },
    {
      "epoch": 0.726459905001397,
      "grad_norm": 0.08932902663946152,
      "learning_rate": 1.415786202610317e-05,
      "loss": 0.2584,
      "num_tokens": 1483853206.0,
      "step": 650
    },
    {
      "epoch": 0.7320480581167924,
      "grad_norm": 0.07598485797643661,
      "learning_rate": 1.4095711622125545e-05,
      "loss": 0.2586,
      "num_tokens": 1495295410.0,
      "step": 655
    },
    {
      "epoch": 0.7376362112321878,
      "grad_norm": 0.0836004987359047,
      "learning_rate": 1.403356121814792e-05,
      "loss": 0.2589,
      "num_tokens": 1506682604.0,
      "step": 660
    },
    {
      "epoch": 0.7432243643475831,
      "grad_norm": 0.10632190108299255,
      "learning_rate": 1.3971410814170294e-05,
      "loss": 0.2591,
      "num_tokens": 1518151316.0,
      "step": 665
    },
    {
      "epoch": 0.7488125174629785,
      "grad_norm": 0.07187483459711075,
      "learning_rate": 1.3909260410192667e-05,
      "loss": 0.2586,
      "num_tokens": 1529536050.0,
      "step": 670
    },
    {
      "epoch": 0.7544006705783739,
      "grad_norm": 0.06604966521263123,
      "learning_rate": 1.384711000621504e-05,
      "loss": 0.2572,
      "num_tokens": 1540906374.0,
      "step": 675
    },
    {
      "epoch": 0.7599888236937692,
      "grad_norm": 0.059948720037937164,
      "learning_rate": 1.3784959602237416e-05,
      "loss": 0.2564,
      "num_tokens": 1552367588.0,
      "step": 680
    },
    {
      "epoch": 0.7655769768091646,
      "grad_norm": 0.10807392001152039,
      "learning_rate": 1.372280919825979e-05,
      "loss": 0.2572,
      "num_tokens": 1563798845.0,
      "step": 685
    },
    {
      "epoch": 0.7711651299245599,
      "grad_norm": 0.07425787299871445,
      "learning_rate": 1.3660658794282164e-05,
      "loss": 0.256,
      "num_tokens": 1575159561.0,
      "step": 690
    },
    {
      "epoch": 0.7767532830399553,
      "grad_norm": 0.10806109756231308,
      "learning_rate": 1.359850839030454e-05,
      "loss": 0.2544,
      "num_tokens": 1586595617.0,
      "step": 695
    },
    {
      "epoch": 0.7823414361553507,
      "grad_norm": 0.08122843503952026,
      "learning_rate": 1.3536357986326911e-05,
      "loss": 0.2558,
      "num_tokens": 1598019511.0,
      "step": 700
    },
    {
      "epoch": 0.787929589270746,
      "grad_norm": 0.08998622000217438,
      "learning_rate": 1.3474207582349286e-05,
      "loss": 0.2551,
      "num_tokens": 1609449516.0,
      "step": 705
    },
    {
      "epoch": 0.7935177423861414,
      "grad_norm": 0.06237884238362312,
      "learning_rate": 1.341205717837166e-05,
      "loss": 0.2558,
      "num_tokens": 1620942485.0,
      "step": 710
    },
    {
      "epoch": 0.7991058955015368,
      "grad_norm": 0.08027390390634537,
      "learning_rate": 1.3349906774394035e-05,
      "loss": 0.2551,
      "num_tokens": 1632316514.0,
      "step": 715
    },
    {
      "epoch": 0.8046940486169321,
      "grad_norm": 0.06748659908771515,
      "learning_rate": 1.328775637041641e-05,
      "loss": 0.2551,
      "num_tokens": 1643728845.0,
      "step": 720
    },
    {
      "epoch": 0.8102822017323275,
      "grad_norm": 0.06500785052776337,
      "learning_rate": 1.3225605966438783e-05,
      "loss": 0.2552,
      "num_tokens": 1655109711.0,
      "step": 725
    },
    {
      "epoch": 0.8158703548477229,
      "grad_norm": 0.07137981057167053,
      "learning_rate": 1.3163455562461157e-05,
      "loss": 0.2527,
      "num_tokens": 1666511424.0,
      "step": 730
    },
    {
      "epoch": 0.8214585079631181,
      "grad_norm": 0.07075709849596024,
      "learning_rate": 1.310130515848353e-05,
      "loss": 0.2524,
      "num_tokens": 1677945037.0,
      "step": 735
    },
    {
      "epoch": 0.8270466610785135,
      "grad_norm": 0.07283911108970642,
      "learning_rate": 1.3039154754505905e-05,
      "loss": 0.2531,
      "num_tokens": 1689328065.0,
      "step": 740
    },
    {
      "epoch": 0.832634814193909,
      "grad_norm": 0.07047147303819656,
      "learning_rate": 1.2977004350528279e-05,
      "loss": 0.2526,
      "num_tokens": 1700701797.0,
      "step": 745
    },
    {
      "epoch": 0.8382229673093042,
      "grad_norm": 0.0700286328792572,
      "learning_rate": 1.2914853946550654e-05,
      "loss": 0.2529,
      "num_tokens": 1712094439.0,
      "step": 750
    },
    {
      "epoch": 0.8438111204246996,
      "grad_norm": 0.07566844671964645,
      "learning_rate": 1.2852703542573029e-05,
      "loss": 0.2525,
      "num_tokens": 1723506482.0,
      "step": 755
    },
    {
      "epoch": 0.849399273540095,
      "grad_norm": 0.0756835788488388,
      "learning_rate": 1.2790553138595402e-05,
      "loss": 0.2515,
      "num_tokens": 1734948173.0,
      "step": 760
    },
    {
      "epoch": 0.8549874266554903,
      "grad_norm": 0.10192592442035675,
      "learning_rate": 1.2728402734617776e-05,
      "loss": 0.2527,
      "num_tokens": 1746339575.0,
      "step": 765
    },
    {
      "epoch": 0.8605755797708857,
      "grad_norm": 0.06903030723333359,
      "learning_rate": 1.266625233064015e-05,
      "loss": 0.2511,
      "num_tokens": 1757726090.0,
      "step": 770
    },
    {
      "epoch": 0.8661637328862811,
      "grad_norm": 0.07352637499570847,
      "learning_rate": 1.2604101926662524e-05,
      "loss": 0.2505,
      "num_tokens": 1769185846.0,
      "step": 775
    },
    {
      "epoch": 0.8717518860016764,
      "grad_norm": 0.0697358250617981,
      "learning_rate": 1.25419515226849e-05,
      "loss": 0.251,
      "num_tokens": 1780540282.0,
      "step": 780
    },
    {
      "epoch": 0.8773400391170718,
      "grad_norm": 0.07313339412212372,
      "learning_rate": 1.2479801118707273e-05,
      "loss": 0.251,
      "num_tokens": 1792000179.0,
      "step": 785
    },
    {
      "epoch": 0.8829281922324672,
      "grad_norm": 0.07740647345781326,
      "learning_rate": 1.2417650714729648e-05,
      "loss": 0.2498,
      "num_tokens": 1803413179.0,
      "step": 790
    },
    {
      "epoch": 0.8885163453478625,
      "grad_norm": 0.06439563632011414,
      "learning_rate": 1.235550031075202e-05,
      "loss": 0.2495,
      "num_tokens": 1814874417.0,
      "step": 795
    },
    {
      "epoch": 0.8941044984632579,
      "grad_norm": 0.07976026087999344,
      "learning_rate": 1.2293349906774395e-05,
      "loss": 0.2492,
      "num_tokens": 1826321606.0,
      "step": 800
    },
    {
      "epoch": 0.8996926515786533,
      "grad_norm": 0.07340570539236069,
      "learning_rate": 1.2231199502796768e-05,
      "loss": 0.2491,
      "num_tokens": 1837738496.0,
      "step": 805
    },
    {
      "epoch": 0.9052808046940486,
      "grad_norm": 0.074010469019413,
      "learning_rate": 1.2169049098819143e-05,
      "loss": 0.2509,
      "num_tokens": 1849163294.0,
      "step": 810
    },
    {
      "epoch": 0.910868957809444,
      "grad_norm": 0.06877516210079193,
      "learning_rate": 1.2106898694841519e-05,
      "loss": 0.2491,
      "num_tokens": 1860581937.0,
      "step": 815
    },
    {
      "epoch": 0.9164571109248394,
      "grad_norm": 0.11264199018478394,
      "learning_rate": 1.2044748290863892e-05,
      "loss": 0.249,
      "num_tokens": 1872023057.0,
      "step": 820
    },
    {
      "epoch": 0.9220452640402347,
      "grad_norm": 0.07071848213672638,
      "learning_rate": 1.1982597886886265e-05,
      "loss": 0.2491,
      "num_tokens": 1883430495.0,
      "step": 825
    },
    {
      "epoch": 0.9276334171556301,
      "grad_norm": 0.06249503418803215,
      "learning_rate": 1.1920447482908639e-05,
      "loss": 0.2479,
      "num_tokens": 1894830982.0,
      "step": 830
    },
    {
      "epoch": 0.9332215702710255,
      "grad_norm": 0.09320665150880814,
      "learning_rate": 1.1858297078931014e-05,
      "loss": 0.249,
      "num_tokens": 1906265064.0,
      "step": 835
    },
    {
      "epoch": 0.9388097233864208,
      "grad_norm": 0.07534969598054886,
      "learning_rate": 1.1796146674953387e-05,
      "loss": 0.2457,
      "num_tokens": 1917622370.0,
      "step": 840
    },
    {
      "epoch": 0.9443978765018162,
      "grad_norm": 0.06999360024929047,
      "learning_rate": 1.1733996270975763e-05,
      "loss": 0.2472,
      "num_tokens": 1929083402.0,
      "step": 845
    },
    {
      "epoch": 0.9499860296172115,
      "grad_norm": 0.0674748569726944,
      "learning_rate": 1.1671845866998138e-05,
      "loss": 0.247,
      "num_tokens": 1940493455.0,
      "step": 850
    },
    {
      "epoch": 0.9555741827326069,
      "grad_norm": 0.07490040361881256,
      "learning_rate": 1.1609695463020511e-05,
      "loss": 0.2472,
      "num_tokens": 1951881506.0,
      "step": 855
    },
    {
      "epoch": 0.9611623358480023,
      "grad_norm": 0.06616108119487762,
      "learning_rate": 1.1547545059042884e-05,
      "loss": 0.246,
      "num_tokens": 1963273217.0,
      "step": 860
    },
    {
      "epoch": 0.9667504889633975,
      "grad_norm": 0.06972012668848038,
      "learning_rate": 1.1485394655065258e-05,
      "loss": 0.2465,
      "num_tokens": 1974706747.0,
      "step": 865
    },
    {
      "epoch": 0.972338642078793,
      "grad_norm": 0.07631607353687286,
      "learning_rate": 1.1423244251087633e-05,
      "loss": 0.2449,
      "num_tokens": 1986149340.0,
      "step": 870
    },
    {
      "epoch": 0.9779267951941883,
      "grad_norm": 0.06611183285713196,
      "learning_rate": 1.1361093847110008e-05,
      "loss": 0.2454,
      "num_tokens": 1997579388.0,
      "step": 875
    },
    {
      "epoch": 0.9835149483095836,
      "grad_norm": 0.06674376130104065,
      "learning_rate": 1.1298943443132382e-05,
      "loss": 0.2455,
      "num_tokens": 2008982302.0,
      "step": 880
    },
    {
      "epoch": 0.989103101424979,
      "grad_norm": 0.06652665883302689,
      "learning_rate": 1.1236793039154757e-05,
      "loss": 0.2431,
      "num_tokens": 2020472578.0,
      "step": 885
    },
    {
      "epoch": 0.9946912545403744,
      "grad_norm": 0.06591866910457611,
      "learning_rate": 1.1174642635177128e-05,
      "loss": 0.2449,
      "num_tokens": 2031841635.0,
      "step": 890
    },
    {
      "epoch": 1.0011176306230791,
      "grad_norm": 0.23118355870246887,
      "learning_rate": 1.1112492231199504e-05,
      "loss": 0.2933,
      "num_tokens": 2044427047.0,
      "step": 895
    },
    {
      "epoch": 1.0067057837384745,
      "grad_norm": 0.10587958991527557,
      "learning_rate": 1.1050341827221877e-05,
      "loss": 0.2414,
      "num_tokens": 2055793025.0,
      "step": 900
    },
    {
      "epoch": 1.0122939368538697,
      "grad_norm": 0.0685587227344513,
      "learning_rate": 1.0988191423244252e-05,
      "loss": 0.2432,
      "num_tokens": 2067243827.0,
      "step": 905
    },
    {
      "epoch": 1.017882089969265,
      "grad_norm": 0.08346986025571823,
      "learning_rate": 1.0926041019266627e-05,
      "loss": 0.2409,
      "num_tokens": 2078620577.0,
      "step": 910
    },
    {
      "epoch": 1.0234702430846605,
      "grad_norm": 0.08620323985815048,
      "learning_rate": 1.0863890615289e-05,
      "loss": 0.2415,
      "num_tokens": 2089971736.0,
      "step": 915
    },
    {
      "epoch": 1.029058396200056,
      "grad_norm": 0.0674772635102272,
      "learning_rate": 1.0801740211311374e-05,
      "loss": 0.2421,
      "num_tokens": 2101379103.0,
      "step": 920
    },
    {
      "epoch": 1.0346465493154513,
      "grad_norm": 0.07416342943906784,
      "learning_rate": 1.0739589807333748e-05,
      "loss": 0.2407,
      "num_tokens": 2112776077.0,
      "step": 925
    },
    {
      "epoch": 1.0402347024308467,
      "grad_norm": 0.07598921656608582,
      "learning_rate": 1.0677439403356123e-05,
      "loss": 0.2409,
      "num_tokens": 2124236482.0,
      "step": 930
    },
    {
      "epoch": 1.0458228555462419,
      "grad_norm": 0.0737273246049881,
      "learning_rate": 1.0615288999378496e-05,
      "loss": 0.2413,
      "num_tokens": 2135688525.0,
      "step": 935
    },
    {
      "epoch": 1.0514110086616373,
      "grad_norm": 0.06743738800287247,
      "learning_rate": 1.0553138595400871e-05,
      "loss": 0.2386,
      "num_tokens": 2147122071.0,
      "step": 940
    },
    {
      "epoch": 1.0569991617770327,
      "grad_norm": 0.06557197123765945,
      "learning_rate": 1.0490988191423246e-05,
      "loss": 0.2403,
      "num_tokens": 2158530375.0,
      "step": 945
    },
    {
      "epoch": 1.062587314892428,
      "grad_norm": 0.06542874127626419,
      "learning_rate": 1.0428837787445618e-05,
      "loss": 0.2402,
      "num_tokens": 2169932710.0,
      "step": 950
    },
    {
      "epoch": 1.0681754680078235,
      "grad_norm": 0.06733342260122299,
      "learning_rate": 1.0366687383467993e-05,
      "loss": 0.2388,
      "num_tokens": 2181338844.0,
      "step": 955
    },
    {
      "epoch": 1.0737636211232187,
      "grad_norm": 0.0743744820356369,
      "learning_rate": 1.0304536979490367e-05,
      "loss": 0.2398,
      "num_tokens": 2192802959.0,
      "step": 960
    },
    {
      "epoch": 1.079351774238614,
      "grad_norm": 0.06693757325410843,
      "learning_rate": 1.0242386575512742e-05,
      "loss": 0.2388,
      "num_tokens": 2204275699.0,
      "step": 965
    },
    {
      "epoch": 1.0849399273540095,
      "grad_norm": 0.06969534605741501,
      "learning_rate": 1.0180236171535117e-05,
      "loss": 0.2374,
      "num_tokens": 2215670693.0,
      "step": 970
    },
    {
      "epoch": 1.0905280804694049,
      "grad_norm": 0.06961479038000107,
      "learning_rate": 1.011808576755749e-05,
      "loss": 0.2388,
      "num_tokens": 2227056672.0,
      "step": 975
    },
    {
      "epoch": 1.0961162335848003,
      "grad_norm": 0.07095306366682053,
      "learning_rate": 1.0055935363579865e-05,
      "loss": 0.2378,
      "num_tokens": 2238434978.0,
      "step": 980
    },
    {
      "epoch": 1.1017043867001957,
      "grad_norm": 0.06813249737024307,
      "learning_rate": 9.993784959602239e-06,
      "loss": 0.2375,
      "num_tokens": 2249850683.0,
      "step": 985
    },
    {
      "epoch": 1.107292539815591,
      "grad_norm": 0.0719991996884346,
      "learning_rate": 9.931634555624612e-06,
      "loss": 0.2373,
      "num_tokens": 2261212383.0,
      "step": 990
    },
    {
      "epoch": 1.1128806929309862,
      "grad_norm": 0.06997533142566681,
      "learning_rate": 9.869484151646986e-06,
      "loss": 0.237,
      "num_tokens": 2272587162.0,
      "step": 995
    },
    {
      "epoch": 1.1184688460463816,
      "grad_norm": 0.07270677387714386,
      "learning_rate": 9.80733374766936e-06,
      "loss": 0.2359,
      "num_tokens": 2283912018.0,
      "step": 1000
    },
    {
      "epoch": 1.124056999161777,
      "grad_norm": 0.07579246163368225,
      "learning_rate": 9.745183343691734e-06,
      "loss": 0.2366,
      "num_tokens": 2295347080.0,
      "step": 1005
    },
    {
      "epoch": 1.1296451522771724,
      "grad_norm": 0.07113311439752579,
      "learning_rate": 9.68303293971411e-06,
      "loss": 0.2365,
      "num_tokens": 2306773064.0,
      "step": 1010
    },
    {
      "epoch": 1.1352333053925678,
      "grad_norm": 0.07556473463773727,
      "learning_rate": 9.620882535736483e-06,
      "loss": 0.2372,
      "num_tokens": 2318150911.0,
      "step": 1015
    },
    {
      "epoch": 1.140821458507963,
      "grad_norm": 0.06698355823755264,
      "learning_rate": 9.558732131758858e-06,
      "loss": 0.235,
      "num_tokens": 2329523719.0,
      "step": 1020
    },
    {
      "epoch": 1.1464096116233584,
      "grad_norm": 0.08231225609779358,
      "learning_rate": 9.496581727781231e-06,
      "loss": 0.2344,
      "num_tokens": 2340997447.0,
      "step": 1025
    },
    {
      "epoch": 1.1519977647387538,
      "grad_norm": 0.06964124739170074,
      "learning_rate": 9.434431323803605e-06,
      "loss": 0.2361,
      "num_tokens": 2352475029.0,
      "step": 1030
    },
    {
      "epoch": 1.1575859178541492,
      "grad_norm": 0.0700337216258049,
      "learning_rate": 9.37228091982598e-06,
      "loss": 0.2355,
      "num_tokens": 2363920017.0,
      "step": 1035
    },
    {
      "epoch": 1.1631740709695446,
      "grad_norm": 0.0741286426782608,
      "learning_rate": 9.310130515848353e-06,
      "loss": 0.2341,
      "num_tokens": 2375322119.0,
      "step": 1040
    },
    {
      "epoch": 1.16876222408494,
      "grad_norm": 0.06947010010480881,
      "learning_rate": 9.247980111870728e-06,
      "loss": 0.2342,
      "num_tokens": 2386741113.0,
      "step": 1045
    },
    {
      "epoch": 1.1743503772003352,
      "grad_norm": 0.07005992531776428,
      "learning_rate": 9.185829707893102e-06,
      "loss": 0.2332,
      "num_tokens": 2398129082.0,
      "step": 1050
    },
    {
      "epoch": 1.1799385303157306,
      "grad_norm": 0.08020587265491486,
      "learning_rate": 9.123679303915475e-06,
      "loss": 0.2339,
      "num_tokens": 2409586941.0,
      "step": 1055
    },
    {
      "epoch": 1.185526683431126,
      "grad_norm": 0.06929939240217209,
      "learning_rate": 9.06152889993785e-06,
      "loss": 0.2327,
      "num_tokens": 2421044006.0,
      "step": 1060
    },
    {
      "epoch": 1.1911148365465214,
      "grad_norm": 0.06981244683265686,
      "learning_rate": 8.999378495960226e-06,
      "loss": 0.2341,
      "num_tokens": 2432441590.0,
      "step": 1065
    },
    {
      "epoch": 1.1967029896619168,
      "grad_norm": 0.06755483150482178,
      "learning_rate": 8.937228091982599e-06,
      "loss": 0.235,
      "num_tokens": 2443840149.0,
      "step": 1070
    },
    {
      "epoch": 1.2022911427773122,
      "grad_norm": 0.0726640447974205,
      "learning_rate": 8.875077688004972e-06,
      "loss": 0.2323,
      "num_tokens": 2455313893.0,
      "step": 1075
    },
    {
      "epoch": 1.2078792958927074,
      "grad_norm": 0.08011981844902039,
      "learning_rate": 8.812927284027348e-06,
      "loss": 0.2326,
      "num_tokens": 2466720769.0,
      "step": 1080
    },
    {
      "epoch": 1.2134674490081028,
      "grad_norm": 0.07221303135156631,
      "learning_rate": 8.750776880049721e-06,
      "loss": 0.2334,
      "num_tokens": 2478131913.0,
      "step": 1085
    },
    {
      "epoch": 1.2190556021234982,
      "grad_norm": 0.0708707943558693,
      "learning_rate": 8.688626476072094e-06,
      "loss": 0.2322,
      "num_tokens": 2489518624.0,
      "step": 1090
    },
    {
      "epoch": 1.2246437552388936,
      "grad_norm": 0.08248551189899445,
      "learning_rate": 8.62647607209447e-06,
      "loss": 0.232,
      "num_tokens": 2500909560.0,
      "step": 1095
    },
    {
      "epoch": 1.230231908354289,
      "grad_norm": 0.0947834923863411,
      "learning_rate": 8.564325668116843e-06,
      "loss": 0.2329,
      "num_tokens": 2512309464.0,
      "step": 1100
    },
    {
      "epoch": 1.2358200614696844,
      "grad_norm": 0.07485756278038025,
      "learning_rate": 8.502175264139218e-06,
      "loss": 0.2327,
      "num_tokens": 2523712493.0,
      "step": 1105
    },
    {
      "epoch": 1.2414082145850796,
      "grad_norm": 0.07397732138633728,
      "learning_rate": 8.440024860161591e-06,
      "loss": 0.2323,
      "num_tokens": 2535086363.0,
      "step": 1110
    },
    {
      "epoch": 1.246996367700475,
      "grad_norm": 0.07793208956718445,
      "learning_rate": 8.377874456183965e-06,
      "loss": 0.2314,
      "num_tokens": 2546530108.0,
      "step": 1115
    },
    {
      "epoch": 1.2525845208158704,
      "grad_norm": 0.07121593505144119,
      "learning_rate": 8.31572405220634e-06,
      "loss": 0.2323,
      "num_tokens": 2557949004.0,
      "step": 1120
    },
    {
      "epoch": 1.2581726739312658,
      "grad_norm": 0.0731595903635025,
      "learning_rate": 8.253573648228713e-06,
      "loss": 0.2296,
      "num_tokens": 2569329945.0,
      "step": 1125
    },
    {
      "epoch": 1.2637608270466612,
      "grad_norm": 0.07631632685661316,
      "learning_rate": 8.191423244251089e-06,
      "loss": 0.2302,
      "num_tokens": 2580772666.0,
      "step": 1130
    },
    {
      "epoch": 1.2693489801620563,
      "grad_norm": 0.07604105025529861,
      "learning_rate": 8.129272840273462e-06,
      "loss": 0.2301,
      "num_tokens": 2592188030.0,
      "step": 1135
    },
    {
      "epoch": 1.2749371332774517,
      "grad_norm": 0.08908157795667648,
      "learning_rate": 8.067122436295837e-06,
      "loss": 0.2302,
      "num_tokens": 2603596771.0,
      "step": 1140
    },
    {
      "epoch": 1.2805252863928471,
      "grad_norm": 0.07644952088594437,
      "learning_rate": 8.00497203231821e-06,
      "loss": 0.2299,
      "num_tokens": 2615012662.0,
      "step": 1145
    },
    {
      "epoch": 1.2861134395082425,
      "grad_norm": 0.07276136428117752,
      "learning_rate": 7.942821628340584e-06,
      "loss": 0.2302,
      "num_tokens": 2626465997.0,
      "step": 1150
    },
    {
      "epoch": 1.291701592623638,
      "grad_norm": 0.06722812354564667,
      "learning_rate": 7.880671224362959e-06,
      "loss": 0.2297,
      "num_tokens": 2637906310.0,
      "step": 1155
    },
    {
      "epoch": 1.2972897457390333,
      "grad_norm": 0.06997846812009811,
      "learning_rate": 7.818520820385334e-06,
      "loss": 0.2299,
      "num_tokens": 2649317945.0,
      "step": 1160
    },
    {
      "epoch": 1.3028778988544287,
      "grad_norm": 0.06939522922039032,
      "learning_rate": 7.756370416407708e-06,
      "loss": 0.2297,
      "num_tokens": 2660728809.0,
      "step": 1165
    },
    {
      "epoch": 1.308466051969824,
      "grad_norm": 0.07478871941566467,
      "learning_rate": 7.694220012430081e-06,
      "loss": 0.2289,
      "num_tokens": 2672175476.0,
      "step": 1170
    },
    {
      "epoch": 1.3140542050852193,
      "grad_norm": 0.07200620323419571,
      "learning_rate": 7.632069608452456e-06,
      "loss": 0.2292,
      "num_tokens": 2683583065.0,
      "step": 1175
    },
    {
      "epoch": 1.3196423582006147,
      "grad_norm": 0.085682712495327,
      "learning_rate": 7.56991920447483e-06,
      "loss": 0.2277,
      "num_tokens": 2695002849.0,
      "step": 1180
    },
    {
      "epoch": 1.32523051131601,
      "grad_norm": 0.07334276288747787,
      "learning_rate": 7.507768800497204e-06,
      "loss": 0.2291,
      "num_tokens": 2706445460.0,
      "step": 1185
    },
    {
      "epoch": 1.3308186644314053,
      "grad_norm": 0.07264410704374313,
      "learning_rate": 7.445618396519578e-06,
      "loss": 0.2279,
      "num_tokens": 2717815944.0,
      "step": 1190
    },
    {
      "epoch": 1.3364068175468007,
      "grad_norm": 0.06988160312175751,
      "learning_rate": 7.383467992541952e-06,
      "loss": 0.2279,
      "num_tokens": 2729163930.0,
      "step": 1195
    },
    {
      "epoch": 1.341994970662196,
      "grad_norm": 0.06736642867326736,
      "learning_rate": 7.321317588564326e-06,
      "loss": 0.2264,
      "num_tokens": 2740621790.0,
      "step": 1200
    },
    {
      "epoch": 1.3475831237775915,
      "grad_norm": 0.07245367765426636,
      "learning_rate": 7.259167184586701e-06,
      "loss": 0.2267,
      "num_tokens": 2752072684.0,
      "step": 1205
    },
    {
      "epoch": 1.3531712768929869,
      "grad_norm": 0.08028583973646164,
      "learning_rate": 7.1970167806090745e-06,
      "loss": 0.2278,
      "num_tokens": 2763500501.0,
      "step": 1210
    },
    {
      "epoch": 1.3587594300083823,
      "grad_norm": 0.07318496704101562,
      "learning_rate": 7.134866376631449e-06,
      "loss": 0.2273,
      "num_tokens": 2774964965.0,
      "step": 1215
    },
    {
      "epoch": 1.3643475831237777,
      "grad_norm": 0.07295212149620056,
      "learning_rate": 7.072715972653823e-06,
      "loss": 0.2274,
      "num_tokens": 2786383778.0,
      "step": 1220
    },
    {
      "epoch": 1.369935736239173,
      "grad_norm": 0.09171269088983536,
      "learning_rate": 7.0105655686761964e-06,
      "loss": 0.2277,
      "num_tokens": 2797785341.0,
      "step": 1225
    },
    {
      "epoch": 1.3755238893545683,
      "grad_norm": 0.07763542234897614,
      "learning_rate": 6.948415164698571e-06,
      "loss": 0.2259,
      "num_tokens": 2809216451.0,
      "step": 1230
    },
    {
      "epoch": 1.3811120424699637,
      "grad_norm": 0.0825599804520607,
      "learning_rate": 6.886264760720945e-06,
      "loss": 0.2262,
      "num_tokens": 2820729443.0,
      "step": 1235
    },
    {
      "epoch": 1.386700195585359,
      "grad_norm": 0.08326242119073868,
      "learning_rate": 6.82411435674332e-06,
      "loss": 0.2255,
      "num_tokens": 2832131712.0,
      "step": 1240
    },
    {
      "epoch": 1.3922883487007545,
      "grad_norm": 0.077967569231987,
      "learning_rate": 6.7619639527656935e-06,
      "loss": 0.2261,
      "num_tokens": 2843515554.0,
      "step": 1245
    },
    {
      "epoch": 1.3978765018161496,
      "grad_norm": 0.06851288676261902,
      "learning_rate": 6.699813548788068e-06,
      "loss": 0.2249,
      "num_tokens": 2854912230.0,
      "step": 1250
    },
    {
      "epoch": 1.403464654931545,
      "grad_norm": 0.07257357984781265,
      "learning_rate": 6.637663144810442e-06,
      "loss": 0.2238,
      "num_tokens": 2866364126.0,
      "step": 1255
    },
    {
      "epoch": 1.4090528080469404,
      "grad_norm": 0.07548319548368454,
      "learning_rate": 6.5755127408328155e-06,
      "loss": 0.2258,
      "num_tokens": 2877776317.0,
      "step": 1260
    },
    {
      "epoch": 1.4146409611623358,
      "grad_norm": 0.06983385235071182,
      "learning_rate": 6.51336233685519e-06,
      "loss": 0.2247,
      "num_tokens": 2889208578.0,
      "step": 1265
    },
    {
      "epoch": 1.4202291142777312,
      "grad_norm": 0.06997440755367279,
      "learning_rate": 6.451211932877565e-06,
      "loss": 0.2253,
      "num_tokens": 2900590760.0,
      "step": 1270
    },
    {
      "epoch": 1.4258172673931266,
      "grad_norm": 0.070710189640522,
      "learning_rate": 6.389061528899938e-06,
      "loss": 0.2248,
      "num_tokens": 2912023196.0,
      "step": 1275
    },
    {
      "epoch": 1.431405420508522,
      "grad_norm": 0.07270118594169617,
      "learning_rate": 6.326911124922313e-06,
      "loss": 0.2238,
      "num_tokens": 2923429499.0,
      "step": 1280
    },
    {
      "epoch": 1.4369935736239172,
      "grad_norm": 0.08694124966859818,
      "learning_rate": 6.264760720944687e-06,
      "loss": 0.2259,
      "num_tokens": 2934837080.0,
      "step": 1285
    },
    {
      "epoch": 1.4425817267393126,
      "grad_norm": 0.07063344120979309,
      "learning_rate": 6.20261031696706e-06,
      "loss": 0.2239,
      "num_tokens": 2946269327.0,
      "step": 1290
    },
    {
      "epoch": 1.448169879854708,
      "grad_norm": 0.07271133363246918,
      "learning_rate": 6.140459912989435e-06,
      "loss": 0.2245,
      "num_tokens": 2957677718.0,
      "step": 1295
    },
    {
      "epoch": 1.4537580329701034,
      "grad_norm": 0.07457529008388519,
      "learning_rate": 6.07830950901181e-06,
      "loss": 0.2243,
      "num_tokens": 2969106309.0,
      "step": 1300
    },
    {
      "epoch": 1.4593461860854988,
      "grad_norm": 0.08053404092788696,
      "learning_rate": 6.016159105034183e-06,
      "loss": 0.2246,
      "num_tokens": 2980514913.0,
      "step": 1305
    },
    {
      "epoch": 1.464934339200894,
      "grad_norm": 0.08139532804489136,
      "learning_rate": 5.9540087010565574e-06,
      "loss": 0.2235,
      "num_tokens": 2991911920.0,
      "step": 1310
    },
    {
      "epoch": 1.4705224923162894,
      "grad_norm": 0.07161208987236023,
      "learning_rate": 5.891858297078932e-06,
      "loss": 0.2238,
      "num_tokens": 3003300184.0,
      "step": 1315
    },
    {
      "epoch": 1.4761106454316848,
      "grad_norm": 0.07118191570043564,
      "learning_rate": 5.829707893101305e-06,
      "loss": 0.2234,
      "num_tokens": 3014691103.0,
      "step": 1320
    },
    {
      "epoch": 1.4816987985470802,
      "grad_norm": 0.07934009283781052,
      "learning_rate": 5.767557489123679e-06,
      "loss": 0.2224,
      "num_tokens": 3026103118.0,
      "step": 1325
    },
    {
      "epoch": 1.4872869516624756,
      "grad_norm": 0.07582356035709381,
      "learning_rate": 5.7054070851460545e-06,
      "loss": 0.2221,
      "num_tokens": 3037596869.0,
      "step": 1330
    },
    {
      "epoch": 1.492875104777871,
      "grad_norm": 0.07446973025798798,
      "learning_rate": 5.643256681168427e-06,
      "loss": 0.2229,
      "num_tokens": 3049028284.0,
      "step": 1335
    },
    {
      "epoch": 1.4984632578932664,
      "grad_norm": 0.0742158517241478,
      "learning_rate": 5.581106277190802e-06,
      "loss": 0.2227,
      "num_tokens": 3060480880.0,
      "step": 1340
    },
    {
      "epoch": 1.5040514110086618,
      "grad_norm": 0.07621730864048004,
      "learning_rate": 5.5189558732131765e-06,
      "loss": 0.2233,
      "num_tokens": 3071863505.0,
      "step": 1345
    },
    {
      "epoch": 1.509639564124057,
      "grad_norm": 0.07590433955192566,
      "learning_rate": 5.456805469235551e-06,
      "loss": 0.2218,
      "num_tokens": 3083299766.0,
      "step": 1350
    },
    {
      "epoch": 1.5152277172394524,
      "grad_norm": 0.07606492191553116,
      "learning_rate": 5.394655065257924e-06,
      "loss": 0.2229,
      "num_tokens": 3094669059.0,
      "step": 1355
    },
    {
      "epoch": 1.5208158703548478,
      "grad_norm": 0.07033602148294449,
      "learning_rate": 5.3325046612802985e-06,
      "loss": 0.2217,
      "num_tokens": 3106096591.0,
      "step": 1360
    },
    {
      "epoch": 1.526404023470243,
      "grad_norm": 0.07336968928575516,
      "learning_rate": 5.270354257302674e-06,
      "loss": 0.2216,
      "num_tokens": 3117475057.0,
      "step": 1365
    },
    {
      "epoch": 1.5319921765856384,
      "grad_norm": 0.07641858607530594,
      "learning_rate": 5.208203853325047e-06,
      "loss": 0.221,
      "num_tokens": 3128902661.0,
      "step": 1370
    },
    {
      "epoch": 1.5375803297010338,
      "grad_norm": 0.07360400259494781,
      "learning_rate": 5.146053449347421e-06,
      "loss": 0.2211,
      "num_tokens": 3140300853.0,
      "step": 1375
    },
    {
      "epoch": 1.5431684828164292,
      "grad_norm": 0.07237103581428528,
      "learning_rate": 5.083903045369796e-06,
      "loss": 0.2212,
      "num_tokens": 3151699200.0,
      "step": 1380
    },
    {
      "epoch": 1.5487566359318246,
      "grad_norm": 0.07762517035007477,
      "learning_rate": 5.021752641392169e-06,
      "loss": 0.2217,
      "num_tokens": 3163130506.0,
      "step": 1385
    },
    {
      "epoch": 1.55434478904722,
      "grad_norm": 0.08191149681806564,
      "learning_rate": 4.959602237414543e-06,
      "loss": 0.2217,
      "num_tokens": 3174508734.0,
      "step": 1390
    },
    {
      "epoch": 1.5599329421626154,
      "grad_norm": 0.07370056957006454,
      "learning_rate": 4.897451833436918e-06,
      "loss": 0.2207,
      "num_tokens": 3185863770.0,
      "step": 1395
    },
    {
      "epoch": 1.5655210952780108,
      "grad_norm": 0.08156365156173706,
      "learning_rate": 4.835301429459292e-06,
      "loss": 0.2211,
      "num_tokens": 3197279089.0,
      "step": 1400
    },
    {
      "epoch": 1.571109248393406,
      "grad_norm": 0.0858791321516037,
      "learning_rate": 4.773151025481666e-06,
      "loss": 0.2205,
      "num_tokens": 3208673959.0,
      "step": 1405
    },
    {
      "epoch": 1.5766974015088013,
      "grad_norm": 0.07985149323940277,
      "learning_rate": 4.7110006215040396e-06,
      "loss": 0.2198,
      "num_tokens": 3220133881.0,
      "step": 1410
    },
    {
      "epoch": 1.5822855546241967,
      "grad_norm": 0.07632007449865341,
      "learning_rate": 4.648850217526415e-06,
| "loss": 0.2206, | |
| "num_tokens": 3231491155.0, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 1.587873707739592, | |
| "grad_norm": 0.07036534696817398, | |
| "learning_rate": 4.586699813548788e-06, | |
| "loss": 0.2207, | |
| "num_tokens": 3242935540.0, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 1.5934618608549873, | |
| "grad_norm": 0.06916205585002899, | |
| "learning_rate": 4.524549409571163e-06, | |
| "loss": 0.2197, | |
| "num_tokens": 3254402835.0, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 1.5990500139703827, | |
| "grad_norm": 0.07398437708616257, | |
| "learning_rate": 4.462399005593537e-06, | |
| "loss": 0.2207, | |
| "num_tokens": 3265876131.0, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.604638167085778, | |
| "grad_norm": 0.07751976698637009, | |
| "learning_rate": 4.400248601615911e-06, | |
| "loss": 0.22, | |
| "num_tokens": 3277349860.0, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 1.6102263202011735, | |
| "grad_norm": 0.07772251963615417, | |
| "learning_rate": 4.338098197638285e-06, | |
| "loss": 0.2191, | |
| "num_tokens": 3288733768.0, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.615814473316569, | |
| "grad_norm": 0.08139509707689285, | |
| "learning_rate": 4.2759477936606595e-06, | |
| "loss": 0.2185, | |
| "num_tokens": 3300158714.0, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 1.6214026264319643, | |
| "grad_norm": 0.06895268708467484, | |
| "learning_rate": 4.213797389683033e-06, | |
| "loss": 0.2196, | |
| "num_tokens": 3311588436.0, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.6269907795473597, | |
| "grad_norm": 0.07353264838457108, | |
| "learning_rate": 4.151646985705407e-06, | |
| "loss": 0.2181, | |
| "num_tokens": 3323053103.0, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 1.6325789326627551, | |
| "grad_norm": 0.07424376159906387, | |
| "learning_rate": 4.0894965817277815e-06, | |
| "loss": 0.2191, | |
| "num_tokens": 3334470782.0, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.6381670857781503, | |
| "grad_norm": 0.07496050000190735, | |
| "learning_rate": 4.027346177750156e-06, | |
| "loss": 0.2177, | |
| "num_tokens": 3345862999.0, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 1.6437552388935457, | |
| "grad_norm": 0.0735882893204689, | |
| "learning_rate": 3.96519577377253e-06, | |
| "loss": 0.2173, | |
| "num_tokens": 3357254289.0, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.649343392008941, | |
| "grad_norm": 0.06661386042833328, | |
| "learning_rate": 3.903045369794904e-06, | |
| "loss": 0.2183, | |
| "num_tokens": 3368694368.0, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 1.6549315451243363, | |
| "grad_norm": 0.07148396968841553, | |
| "learning_rate": 3.8408949658172786e-06, | |
| "loss": 0.2193, | |
| "num_tokens": 3380126124.0, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.6605196982397317, | |
| "grad_norm": 0.0710797905921936, | |
| "learning_rate": 3.7787445618396524e-06, | |
| "loss": 0.2184, | |
| "num_tokens": 3391574072.0, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 1.666107851355127, | |
| "grad_norm": 0.07196183502674103, | |
| "learning_rate": 3.7165941578620263e-06, | |
| "loss": 0.2182, | |
| "num_tokens": 3402977723.0, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.6716960044705225, | |
| "grad_norm": 0.07636953890323639, | |
| "learning_rate": 3.6544437538844006e-06, | |
| "loss": 0.2182, | |
| "num_tokens": 3414408734.0, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 1.6772841575859179, | |
| "grad_norm": 0.08158502727746964, | |
| "learning_rate": 3.592293349906775e-06, | |
| "loss": 0.2185, | |
| "num_tokens": 3425813751.0, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.6828723107013133, | |
| "grad_norm": 0.07502773404121399, | |
| "learning_rate": 3.5301429459291487e-06, | |
| "loss": 0.2179, | |
| "num_tokens": 3437193392.0, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 1.6884604638167087, | |
| "grad_norm": 0.07863924652338028, | |
| "learning_rate": 3.467992541951523e-06, | |
| "loss": 0.2172, | |
| "num_tokens": 3448632773.0, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.694048616932104, | |
| "grad_norm": 0.07169536501169205, | |
| "learning_rate": 3.4058421379738972e-06, | |
| "loss": 0.2187, | |
| "num_tokens": 3460067160.0, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 1.6996367700474995, | |
| "grad_norm": 0.0705784261226654, | |
| "learning_rate": 3.343691733996271e-06, | |
| "loss": 0.2179, | |
| "num_tokens": 3471516712.0, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.7052249231628946, | |
| "grad_norm": 0.07223106175661087, | |
| "learning_rate": 3.2815413300186454e-06, | |
| "loss": 0.2175, | |
| "num_tokens": 3482932353.0, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 1.71081307627829, | |
| "grad_norm": 0.07556425780057907, | |
| "learning_rate": 3.2193909260410196e-06, | |
| "loss": 0.2165, | |
| "num_tokens": 3494314753.0, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.7164012293936852, | |
| "grad_norm": 0.06876374036073685, | |
| "learning_rate": 3.157240522063394e-06, | |
| "loss": 0.2173, | |
| "num_tokens": 3505725660.0, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 1.7219893825090806, | |
| "grad_norm": 0.0745040625333786, | |
| "learning_rate": 3.0950901180857678e-06, | |
| "loss": 0.2167, | |
| "num_tokens": 3517142975.0, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.727577535624476, | |
| "grad_norm": 0.07012300193309784, | |
| "learning_rate": 3.0329397141081416e-06, | |
| "loss": 0.2182, | |
| "num_tokens": 3528528432.0, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 1.7331656887398714, | |
| "grad_norm": 0.07678242027759552, | |
| "learning_rate": 2.9707893101305163e-06, | |
| "loss": 0.2163, | |
| "num_tokens": 3539941340.0, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.7387538418552668, | |
| "grad_norm": 0.07423070818185806, | |
| "learning_rate": 2.90863890615289e-06, | |
| "loss": 0.2173, | |
| "num_tokens": 3551323443.0, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 1.7443419949706622, | |
| "grad_norm": 0.06830978393554688, | |
| "learning_rate": 2.846488502175264e-06, | |
| "loss": 0.2163, | |
| "num_tokens": 3562717030.0, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.7499301480860576, | |
| "grad_norm": 0.06994173675775528, | |
| "learning_rate": 2.7843380981976387e-06, | |
| "loss": 0.2165, | |
| "num_tokens": 3574144181.0, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 1.755518301201453, | |
| "grad_norm": 0.07089458405971527, | |
| "learning_rate": 2.7221876942200126e-06, | |
| "loss": 0.2157, | |
| "num_tokens": 3585556833.0, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.7611064543168484, | |
| "grad_norm": 0.07160953432321548, | |
| "learning_rate": 2.6600372902423864e-06, | |
| "loss": 0.2162, | |
| "num_tokens": 3596978608.0, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 1.7666946074322436, | |
| "grad_norm": 0.07307710498571396, | |
| "learning_rate": 2.597886886264761e-06, | |
| "loss": 0.2174, | |
| "num_tokens": 3608380361.0, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.772282760547639, | |
| "grad_norm": 0.07015839219093323, | |
| "learning_rate": 2.535736482287135e-06, | |
| "loss": 0.2158, | |
| "num_tokens": 3619811825.0, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 1.7778709136630344, | |
| "grad_norm": 0.07099422812461853, | |
| "learning_rate": 2.4735860783095093e-06, | |
| "loss": 0.2165, | |
| "num_tokens": 3631237698.0, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.7834590667784296, | |
| "grad_norm": 0.06655248254537582, | |
| "learning_rate": 2.4114356743318835e-06, | |
| "loss": 0.2173, | |
| "num_tokens": 3642666802.0, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 1.789047219893825, | |
| "grad_norm": 0.06884568929672241, | |
| "learning_rate": 2.349285270354258e-06, | |
| "loss": 0.2146, | |
| "num_tokens": 3654090172.0, | |
| "step": 1600 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1788, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 200, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.823986412087568e+19, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |