{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999360204734485,
  "eval_steps": 500,
  "global_step": 781,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 6.218732532974693,
      "learning_rate": 2.5316455696202533e-07,
      "loss": 1.0548,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.057508328025577,
      "learning_rate": 1.2658227848101267e-06,
      "loss": 1.0608,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 6.634383000711397,
      "learning_rate": 2.5316455696202535e-06,
      "loss": 1.0311,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 42.22599030062134,
      "learning_rate": 3.7974683544303802e-06,
      "loss": 0.9446,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.6255048263910823,
      "learning_rate": 5.063291139240507e-06,
      "loss": 0.855,
      "step": 20
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.223041853621419,
      "learning_rate": 6.329113924050634e-06,
      "loss": 0.8515,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.1499117468328068,
      "learning_rate": 7.5949367088607605e-06,
      "loss": 0.8538,
      "step": 30
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.9893284992712339,
      "learning_rate": 8.860759493670886e-06,
      "loss": 0.8499,
      "step": 35
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.0330597408673325,
      "learning_rate": 1.0126582278481014e-05,
      "loss": 0.8349,
      "step": 40
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.9454728301913932,
      "learning_rate": 1.139240506329114e-05,
      "loss": 0.8139,
      "step": 45
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.867009450049442,
      "learning_rate": 1.2658227848101268e-05,
      "loss": 0.7961,
      "step": 50
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.9233088577396095,
      "learning_rate": 1.3924050632911395e-05,
      "loss": 0.8041,
      "step": 55
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.897730187587102,
      "learning_rate": 1.5189873417721521e-05,
      "loss": 0.8009,
      "step": 60
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.9115211129634383,
      "learning_rate": 1.6455696202531647e-05,
      "loss": 0.7969,
      "step": 65
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.9078252304377841,
      "learning_rate": 1.7721518987341772e-05,
      "loss": 0.8128,
      "step": 70
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8821431725791585,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.7563,
      "step": 75
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8438829745337315,
      "learning_rate": 1.999989986294826e-05,
      "loss": 0.794,
      "step": 80
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.9429490146138716,
      "learning_rate": 1.9996395276708856e-05,
      "loss": 0.7879,
      "step": 85
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.0152173886159006,
      "learning_rate": 1.9987885843187717e-05,
      "loss": 0.788,
      "step": 90
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.9186368798898343,
      "learning_rate": 1.9974375822762117e-05,
      "loss": 0.7707,
      "step": 95
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.8971041378168991,
      "learning_rate": 1.9955871979429188e-05,
      "loss": 0.7664,
      "step": 100
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.9008904434552364,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.8074,
      "step": 105
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.028657752464771,
      "learning_rate": 1.9903922376558432e-05,
      "loss": 0.7919,
      "step": 110
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.9517557420940557,
      "learning_rate": 1.9870502626379127e-05,
      "loss": 0.7998,
      "step": 115
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.9048840711577915,
      "learning_rate": 1.983214105898757e-05,
      "loss": 0.7804,
      "step": 120
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.9111504895861703,
      "learning_rate": 1.978885688068572e-05,
      "loss": 0.7538,
      "step": 125
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.902524854174019,
      "learning_rate": 1.9740671762355548e-05,
      "loss": 0.7566,
      "step": 130
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.9110302651471653,
      "learning_rate": 1.9687609828609156e-05,
      "loss": 0.7737,
      "step": 135
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.9438921387685624,
      "learning_rate": 1.9629697645710432e-05,
      "loss": 0.7561,
      "step": 140
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.8687548609010227,
      "learning_rate": 1.9566964208274254e-05,
      "loss": 0.7564,
      "step": 145
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.9360336326575724,
      "learning_rate": 1.9499440924749878e-05,
      "loss": 0.7616,
      "step": 150
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.9291842110885165,
      "learning_rate": 1.9427161601695833e-05,
      "loss": 0.7587,
      "step": 155
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.8992521463936806,
      "learning_rate": 1.9350162426854152e-05,
      "loss": 0.7923,
      "step": 160
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.9199465839289203,
      "learning_rate": 1.926848195103242e-05,
      "loss": 0.7567,
      "step": 165
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.9202340634296483,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.7561,
      "step": 170
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.8815186951994902,
      "learning_rate": 1.909124299802724e-05,
      "loss": 0.7602,
      "step": 175
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.9374982372632831,
      "learning_rate": 1.8995773258220374e-05,
      "loss": 0.7726,
      "step": 180
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.8668215722574871,
      "learning_rate": 1.8895799647758912e-05,
      "loss": 0.7641,
      "step": 185
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.9704566556440527,
      "learning_rate": 1.879137221995095e-05,
      "loss": 0.7631,
      "step": 190
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0765530525420333,
      "learning_rate": 1.868254325797594e-05,
      "loss": 0.7602,
      "step": 195
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.8477600964123878,
      "learning_rate": 1.8569367248708343e-05,
      "loss": 0.7696,
      "step": 200
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.8880373428392885,
      "learning_rate": 1.845190085543795e-05,
      "loss": 0.7717,
      "step": 205
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.9054121985443794,
      "learning_rate": 1.8330202889500518e-05,
      "loss": 0.7594,
      "step": 210
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.9241486536472063,
      "learning_rate": 1.8204334280833005e-05,
      "loss": 0.7461,
      "step": 215
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.8883815381945972,
      "learning_rate": 1.807435804746807e-05,
      "loss": 0.7244,
      "step": 220
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.954185859352863,
      "learning_rate": 1.7940339263983112e-05,
      "loss": 0.7546,
      "step": 225
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.9582618105947629,
      "learning_rate": 1.7802345028919728e-05,
      "loss": 0.7673,
      "step": 230
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9337180429875352,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.7371,
      "step": 235
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0151633649691056,
      "learning_rate": 1.7514708515485002e-05,
      "loss": 0.7531,
      "step": 240
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.8996109508979938,
      "learning_rate": 1.736521024670737e-05,
      "loss": 0.7726,
      "step": 245
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.9539154938066643,
      "learning_rate": 1.7212024473438145e-05,
      "loss": 0.7594,
      "step": 250
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8527276784078115,
      "learning_rate": 1.705522789046377e-05,
      "loss": 0.7608,
      "step": 255
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9081328398716373,
      "learning_rate": 1.6894899000377462e-05,
      "loss": 0.7626,
      "step": 260
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8896600535495715,
      "learning_rate": 1.67311180742757e-05,
      "loss": 0.7633,
      "step": 265
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8828152532467378,
      "learning_rate": 1.65639671115693e-05,
      "loss": 0.7314,
      "step": 270
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.91123554695286,
      "learning_rate": 1.6393529798929103e-05,
      "loss": 0.7568,
      "step": 275
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.7802151937182064,
      "learning_rate": 1.621989146838704e-05,
      "loss": 0.7301,
      "step": 280
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8417341529900935,
      "learning_rate": 1.6043139054613326e-05,
      "loss": 0.7409,
      "step": 285
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8874377472816912,
      "learning_rate": 1.586336105139127e-05,
      "loss": 0.7586,
      "step": 290
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8993804934668429,
      "learning_rate": 1.568064746731156e-05,
      "loss": 0.7612,
      "step": 295
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8743646519888882,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.7208,
      "step": 300
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.8726571855013807,
      "learning_rate": 1.530678089385782e-05,
      "loss": 0.7296,
      "step": 305
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.8216013695365607,
      "learning_rate": 1.5115815086468103e-05,
      "loss": 0.7194,
      "step": 310
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.911275664325399,
      "learning_rate": 1.492228796847385e-05,
      "loss": 0.7436,
      "step": 315
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.8922331403520879,
      "learning_rate": 1.4726296432169095e-05,
      "loss": 0.7009,
      "step": 320
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.8415314327536128,
      "learning_rate": 1.4527938603696376e-05,
      "loss": 0.7563,
      "step": 325
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.8469607469311058,
      "learning_rate": 1.4327313793918362e-05,
      "loss": 0.7254,
      "step": 330
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.8834979947462068,
      "learning_rate": 1.4124522448696407e-05,
      "loss": 0.7516,
      "step": 335
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.8544733801934922,
      "learning_rate": 1.3919666098600753e-05,
      "loss": 0.7511,
      "step": 340
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.8997621369784197,
      "learning_rate": 1.3712847308077737e-05,
      "loss": 0.7395,
      "step": 345
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.8650786485343112,
      "learning_rate": 1.350416962409934e-05,
      "loss": 0.733,
      "step": 350
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.8646529478803704,
      "learning_rate": 1.3293737524320798e-05,
      "loss": 0.7319,
      "step": 355
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.9052937039545919,
      "learning_rate": 1.3081656364772308e-05,
      "loss": 0.7474,
      "step": 360
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.869464472615771,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.7373,
      "step": 365
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.8872298202760914,
      "learning_rate": 1.2652972365459008e-05,
      "loss": 0.7178,
      "step": 370
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.9203346359488073,
      "learning_rate": 1.243658415285622e-05,
      "loss": 0.7384,
      "step": 375
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.8282910946726554,
      "learning_rate": 1.2218976027351177e-05,
      "loss": 0.7138,
      "step": 380
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.8872272744445378,
      "learning_rate": 1.2000256937760446e-05,
      "loss": 0.706,
      "step": 385
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.84010568628107,
      "learning_rate": 1.1780536389121668e-05,
      "loss": 0.7217,
      "step": 390
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.8336774434546579,
      "learning_rate": 1.155992438786818e-05,
      "loss": 0.7431,
      "step": 395
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.8842944246611821,
      "learning_rate": 1.1338531386752618e-05,
      "loss": 0.7203,
      "step": 400
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.898037732512393,
      "learning_rate": 1.1116468229547079e-05,
      "loss": 0.7497,
      "step": 405
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.8824051684243204,
      "learning_rate": 1.0893846095547493e-05,
      "loss": 0.7424,
      "step": 410
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.8537154820589267,
      "learning_rate": 1.0670776443910024e-05,
      "loss": 0.7391,
      "step": 415
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.9527482786635838,
      "learning_rate": 1.0447370957847343e-05,
      "loss": 0.7288,
      "step": 420
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.8801360044023759,
      "learning_rate": 1.0223741488712732e-05,
      "loss": 0.7177,
      "step": 425
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.9017111738645169,
      "learning_rate": 1e-05,
      "loss": 0.7279,
      "step": 430
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.8799316098800196,
      "learning_rate": 9.776258511287271e-06,
      "loss": 0.7172,
      "step": 435
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.8727504370270498,
      "learning_rate": 9.55262904215266e-06,
      "loss": 0.7424,
      "step": 440
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.852138299323907,
      "learning_rate": 9.329223556089976e-06,
      "loss": 0.743,
      "step": 445
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.8429170905572736,
      "learning_rate": 9.10615390445251e-06,
      "loss": 0.7311,
      "step": 450
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.8715107251678864,
      "learning_rate": 8.883531770452924e-06,
      "loss": 0.733,
      "step": 455
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.8309750831558879,
      "learning_rate": 8.661468613247387e-06,
      "loss": 0.726,
      "step": 460
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8594819292949775,
      "learning_rate": 8.440075612131823e-06,
      "loss": 0.7387,
      "step": 465
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8578509085890005,
      "learning_rate": 8.219463610878336e-06,
      "loss": 0.7154,
      "step": 470
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.8406550181595651,
      "learning_rate": 7.999743062239557e-06,
      "loss": 0.7044,
      "step": 475
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.1106794650945297,
      "learning_rate": 7.781023972648826e-06,
      "loss": 0.7529,
      "step": 480
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.8787480716579242,
      "learning_rate": 7.563415847143782e-06,
      "loss": 0.7416,
      "step": 485
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.8856437968758147,
      "learning_rate": 7.347027634540993e-06,
      "loss": 0.7234,
      "step": 490
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.9115469753409513,
      "learning_rate": 7.131967672889101e-06,
      "loss": 0.7189,
      "step": 495
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.8461806387832909,
      "learning_rate": 6.918343635227694e-06,
      "loss": 0.739,
      "step": 500
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.8797246523561782,
      "learning_rate": 6.706262475679205e-06,
      "loss": 0.7212,
      "step": 505
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.8833725465549789,
      "learning_rate": 6.495830375900665e-06,
      "loss": 0.7344,
      "step": 510
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.90720732768669,
      "learning_rate": 6.287152691922264e-06,
      "loss": 0.7363,
      "step": 515
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.8562580507509661,
      "learning_rate": 6.080333901399252e-06,
      "loss": 0.714,
      "step": 520
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.8368554839429542,
      "learning_rate": 5.875477551303596e-06,
      "loss": 0.6968,
      "step": 525
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8551918920773244,
      "learning_rate": 5.672686206081638e-06,
      "loss": 0.7354,
      "step": 530
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8806502111077001,
      "learning_rate": 5.47206139630363e-06,
      "loss": 0.7009,
      "step": 535
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.8432642364408135,
      "learning_rate": 5.273703567830908e-06,
      "loss": 0.7256,
      "step": 540
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.8082119727212804,
      "learning_rate": 5.077712031526153e-06,
      "loss": 0.7113,
      "step": 545
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.8840305956304885,
      "learning_rate": 4.8841849135319015e-06,
      "loss": 0.7125,
      "step": 550
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.8512866204723579,
      "learning_rate": 4.693219106142186e-06,
      "loss": 0.7077,
      "step": 555
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.8690925493996771,
      "learning_rate": 4.504910219291941e-06,
      "loss": 0.7265,
      "step": 560
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.8486039006714249,
      "learning_rate": 4.319352532688444e-06,
      "loss": 0.7045,
      "step": 565
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.8343157357633112,
      "learning_rate": 4.13663894860873e-06,
      "loss": 0.7081,
      "step": 570
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.8517869372115919,
      "learning_rate": 3.956860945386677e-06,
      "loss": 0.7099,
      "step": 575
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.8810686746902263,
      "learning_rate": 3.7801085316129615e-06,
      "loss": 0.7184,
      "step": 580
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8303217013600214,
      "learning_rate": 3.606470201070904e-06,
      "loss": 0.7046,
      "step": 585
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8143514289362858,
      "learning_rate": 3.4360328884307058e-06,
      "loss": 0.7301,
      "step": 590
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.8919694336829919,
      "learning_rate": 3.2688819257242963e-06,
      "loss": 0.7077,
      "step": 595
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.8712369558568198,
      "learning_rate": 3.1051009996225434e-06,
      "loss": 0.7289,
      "step": 600
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.9359803251828337,
      "learning_rate": 2.9447721095362325e-06,
      "loss": 0.7222,
      "step": 605
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.8118275265180527,
      "learning_rate": 2.7879755265618558e-06,
      "loss": 0.6989,
      "step": 610
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.8993664919932826,
      "learning_rate": 2.6347897532926293e-06,
      "loss": 0.7301,
      "step": 615
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.8299858708500569,
      "learning_rate": 2.485291484515e-06,
      "loss": 0.7282,
      "step": 620
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9509542819405852,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.7321,
      "step": 625
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8207790304441084,
      "learning_rate": 2.1976549710802754e-06,
      "loss": 0.704,
      "step": 630
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8915125592646993,
      "learning_rate": 2.0596607360168897e-06,
      "loss": 0.7155,
      "step": 635
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.893971232650736,
      "learning_rate": 1.9256419525319316e-06,
      "loss": 0.7108,
      "step": 640
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.8642943561769831,
      "learning_rate": 1.7956657191669969e-06,
      "loss": 0.7254,
      "step": 645
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.7620702133454862,
      "learning_rate": 1.6697971104994847e-06,
      "loss": 0.701,
      "step": 650
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9164562019661466,
      "learning_rate": 1.5480991445620541e-06,
      "loss": 0.7252,
      "step": 655
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.86265325973436,
      "learning_rate": 1.4306327512916574e-06,
      "loss": 0.7207,
      "step": 660
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8591146598972604,
      "learning_rate": 1.3174567420240647e-06,
      "loss": 0.7459,
      "step": 665
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8261037051966178,
      "learning_rate": 1.2086277800490554e-06,
      "loss": 0.7142,
      "step": 670
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.820114794330799,
      "learning_rate": 1.1042003522410882e-06,
      "loss": 0.7358,
      "step": 675
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.86546920512789,
      "learning_rate": 1.0042267417796292e-06,
      "loss": 0.7088,
      "step": 680
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.8769857056502491,
      "learning_rate": 9.08757001972762e-07,
      "loss": 0.7036,
      "step": 685
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.8727105004473635,
      "learning_rate": 8.178389311972612e-07,
      "loss": 0.7352,
      "step": 690
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8878216519646419,
      "learning_rate": 7.315180489675822e-07,
      "loss": 0.7161,
      "step": 695
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8073527792911568,
      "learning_rate": 6.498375731458529e-07,
      "loss": 0.7157,
      "step": 700
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8674439240960509,
      "learning_rate": 5.728383983041696e-07,
      "loss": 0.6966,
      "step": 705
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8270417868101663,
      "learning_rate": 5.005590752501244e-07,
      "loss": 0.7165,
      "step": 710
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8133004838860258,
      "learning_rate": 4.3303579172574884e-07,
      "loss": 0.7161,
      "step": 715
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.835774320081132,
      "learning_rate": 3.7030235428956895e-07,
      "loss": 0.7052,
      "step": 720
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8786655763475584,
      "learning_rate": 3.1239017139084725e-07,
      "loss": 0.712,
      "step": 725
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.7922242941846357,
      "learning_rate": 2.593282376444539e-07,
      "loss": 0.7145,
      "step": 730
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8040418257479052,
      "learning_rate": 2.11143119314281e-07,
      "loss": 0.7076,
      "step": 735
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8478224913150121,
      "learning_rate": 1.6785894101243205e-07,
      "loss": 0.7232,
      "step": 740
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8323563837704016,
      "learning_rate": 1.2949737362087156e-07,
      "loss": 0.7098,
      "step": 745
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8756057853063033,
      "learning_rate": 9.607762344156946e-08,
      "loss": 0.734,
      "step": 750
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8581279382569037,
      "learning_rate": 6.761642258056977e-08,
      "loss": 0.7192,
      "step": 755
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8325693456753873,
      "learning_rate": 4.412802057081278e-08,
      "loss": 0.7203,
      "step": 760
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.8361298423651424,
      "learning_rate": 2.5624177237884017e-08,
      "loss": 0.7164,
      "step": 765
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.7881888700108141,
      "learning_rate": 1.2114156812284006e-08,
      "loss": 0.715,
      "step": 770
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.8284253094519822,
      "learning_rate": 3.6047232911462506e-09,
      "loss": 0.7185,
      "step": 775
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.8527930204989956,
      "learning_rate": 1.0013705174061195e-10,
      "loss": 0.735,
      "step": 780
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.7233731150627136,
      "eval_runtime": 630.7835,
      "eval_samples_per_second": 68.169,
      "eval_steps_per_second": 2.131,
      "step": 781
    },
    {
      "epoch": 1.0,
      "step": 781,
      "total_flos": 80937303736320.0,
      "train_loss": 0.7476945167619654,
      "train_runtime": 7284.6392,
      "train_samples_per_second": 13.727,
      "train_steps_per_second": 0.107
    }
  ],
  "logging_steps": 5,
  "max_steps": 781,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 80937303736320.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}